Oct 11 02:46:27 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 11 02:46:27 crc restorecon[4720]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:27 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 11 02:46:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 
02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 11 02:46:28 crc 
restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 
02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 11 02:46:28 crc restorecon[4720]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 11 02:46:29 crc kubenswrapper[4953]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.522236 4953 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528765 4953 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528797 4953 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528809 4953 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528819 4953 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528830 4953 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528840 4953 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528849 4953 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528859 4953 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528869 4953 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528897 4953 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528908 4953 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528917 4953 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528926 4953 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528935 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528943 4953 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528954 4953 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528965 4953 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528975 4953 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528983 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528991 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.528999 4953 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529007 4953 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529015 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529023 4953 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529033 4953 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529043 4953 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529053 4953 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529063 4953 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529073 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529082 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529090 4953 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529098 4953 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529106 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529113 4953 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529122 4953 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529130 4953 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529137 4953 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529145 4953 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529154 4953 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529162 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529170 4953 feature_gate.go:330] unrecognized feature gate: 
ManagedBootImagesAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529178 4953 feature_gate.go:330] unrecognized feature gate: Example Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.529185 4953 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530088 4953 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530107 4953 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530116 4953 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530125 4953 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530134 4953 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530143 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530151 4953 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530158 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530166 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530174 4953 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530182 4953 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530189 4953 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530197 4953 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530204 4953 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530212 4953 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530220 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530227 4953 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530235 4953 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530242 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530250 4953 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530258 4953 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530265 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530272 4953 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530280 4953 feature_gate.go:330] unrecognized 
feature gate: ClusterAPIInstall Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530295 4953 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530305 4953 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530315 4953 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.530323 4953 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531416 4953 flags.go:64] FLAG: --address="0.0.0.0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531445 4953 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531461 4953 flags.go:64] FLAG: --anonymous-auth="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531472 4953 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531484 4953 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531494 4953 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531507 4953 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531518 4953 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531527 4953 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531536 4953 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531545 4953 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531555 4953 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531564 4953 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531573 4953 flags.go:64] FLAG: --cgroup-root="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531582 4953 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531591 4953 flags.go:64] FLAG: --client-ca-file="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531599 4953 flags.go:64] FLAG: --cloud-config="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531642 4953 flags.go:64] FLAG: --cloud-provider="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531653 4953 flags.go:64] FLAG: --cluster-dns="[]" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531666 4953 flags.go:64] FLAG: --cluster-domain="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531677 4953 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531689 4953 flags.go:64] FLAG: --config-dir="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531699 4953 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531709 4953 flags.go:64] FLAG: --container-log-max-files="5" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 
02:46:29.531721 4953 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531731 4953 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531741 4953 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531750 4953 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531759 4953 flags.go:64] FLAG: --contention-profiling="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531768 4953 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531777 4953 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531786 4953 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531795 4953 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531806 4953 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531815 4953 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531823 4953 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531832 4953 flags.go:64] FLAG: --enable-load-reader="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531841 4953 flags.go:64] FLAG: --enable-server="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531850 4953 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531862 4953 flags.go:64] FLAG: --event-burst="100" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531871 4953 flags.go:64] FLAG: --event-qps="50" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531881 4953 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531890 4953 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531899 4953 flags.go:64] FLAG: --eviction-hard="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531910 4953 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531918 4953 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531927 4953 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531936 4953 flags.go:64] FLAG: --eviction-soft="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531945 4953 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531954 4953 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531962 4953 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531971 4953 flags.go:64] FLAG: --experimental-mounter-path="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531980 4953 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.531989 4953 flags.go:64] FLAG: --fail-swap-on="true" Oct 11 02:46:29 crc 
kubenswrapper[4953]: I1011 02:46:29.531998 4953 flags.go:64] FLAG: --feature-gates="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532008 4953 flags.go:64] FLAG: --file-check-frequency="20s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532017 4953 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532028 4953 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532038 4953 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532047 4953 flags.go:64] FLAG: --healthz-port="10248" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532056 4953 flags.go:64] FLAG: --help="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532064 4953 flags.go:64] FLAG: --hostname-override="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532073 4953 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532082 4953 flags.go:64] FLAG: --http-check-frequency="20s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532091 4953 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532100 4953 flags.go:64] FLAG: --image-credential-provider-config="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532108 4953 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532117 4953 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532126 4953 flags.go:64] FLAG: --image-service-endpoint="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532135 4953 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532143 4953 flags.go:64] FLAG: --kube-api-burst="100" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532152 4953 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532161 4953 flags.go:64] FLAG: --kube-api-qps="50" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532169 4953 flags.go:64] FLAG: --kube-reserved="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532179 4953 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532187 4953 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532196 4953 flags.go:64] FLAG: --kubelet-cgroups="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532206 4953 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532215 4953 flags.go:64] FLAG: --lock-file="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532224 4953 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532232 4953 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532241 4953 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532254 4953 flags.go:64] FLAG: --log-json-split-stream="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532263 4953 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532272 
4953 flags.go:64] FLAG: --log-text-split-stream="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532281 4953 flags.go:64] FLAG: --logging-format="text" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532290 4953 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532299 4953 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532307 4953 flags.go:64] FLAG: --manifest-url="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532317 4953 flags.go:64] FLAG: --manifest-url-header="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532329 4953 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532338 4953 flags.go:64] FLAG: --max-open-files="1000000" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532349 4953 flags.go:64] FLAG: --max-pods="110" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532358 4953 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532367 4953 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532376 4953 flags.go:64] FLAG: --memory-manager-policy="None" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532385 4953 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532394 4953 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532402 4953 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532411 4953 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532431 4953 flags.go:64] FLAG: --node-status-max-images="50" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532441 4953 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532450 4953 flags.go:64] FLAG: --oom-score-adj="-999" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532458 4953 flags.go:64] FLAG: --pod-cidr="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532467 4953 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532479 4953 flags.go:64] FLAG: --pod-manifest-path="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532488 4953 flags.go:64] FLAG: --pod-max-pids="-1" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532497 4953 flags.go:64] FLAG: --pods-per-core="0" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532506 4953 flags.go:64] FLAG: --port="10250" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532515 4953 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532524 4953 flags.go:64] FLAG: --provider-id="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532533 4953 flags.go:64] FLAG: --qos-reserved="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532542 4953 flags.go:64] FLAG: --read-only-port="10255" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532552 4953 
flags.go:64] FLAG: --register-node="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532560 4953 flags.go:64] FLAG: --register-schedulable="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532569 4953 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532584 4953 flags.go:64] FLAG: --registry-burst="10" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532593 4953 flags.go:64] FLAG: --registry-qps="5" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532631 4953 flags.go:64] FLAG: --reserved-cpus="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532641 4953 flags.go:64] FLAG: --reserved-memory="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532651 4953 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532660 4953 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532670 4953 flags.go:64] FLAG: --rotate-certificates="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532678 4953 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532687 4953 flags.go:64] FLAG: --runonce="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532695 4953 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532705 4953 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532714 4953 flags.go:64] FLAG: --seccomp-default="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532723 4953 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532732 4953 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532741 4953 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532750 4953 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532759 4953 flags.go:64] FLAG: --storage-driver-password="root" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532768 4953 flags.go:64] FLAG: --storage-driver-secure="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532777 4953 flags.go:64] FLAG: --storage-driver-table="stats" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532785 4953 flags.go:64] FLAG: --storage-driver-user="root" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532794 4953 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532803 4953 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532812 4953 flags.go:64] FLAG: --system-cgroups="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532820 4953 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532834 4953 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532842 4953 flags.go:64] FLAG: --tls-cert-file="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532851 4953 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532861 
4953 flags.go:64] FLAG: --tls-min-version="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532870 4953 flags.go:64] FLAG: --tls-private-key-file="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532878 4953 flags.go:64] FLAG: --topology-manager-policy="none" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532887 4953 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532896 4953 flags.go:64] FLAG: --topology-manager-scope="container" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532904 4953 flags.go:64] FLAG: --v="2" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532916 4953 flags.go:64] FLAG: --version="false" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532928 4953 flags.go:64] FLAG: --vmodule="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532938 4953 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.532948 4953 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533180 4953 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533192 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533201 4953 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533210 4953 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533219 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533227 4953 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533235 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533242 4953 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533250 4953 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533258 4953 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533265 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533273 4953 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533281 4953 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533288 4953 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533296 4953 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533314 4953 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533322 4953 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533329 4953 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 11 02:46:29 
crc kubenswrapper[4953]: W1011 02:46:29.533337 4953 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533344 4953 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533353 4953 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533360 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533368 4953 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533376 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533383 4953 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533391 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533399 4953 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533406 4953 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533413 4953 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533421 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533428 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533436 4953 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533446 4953 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533453 4953 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533463 4953 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533475 4953 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533485 4953 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533493 4953 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533502 4953 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533510 4953 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533519 4953 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533527 4953 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533537 4953 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
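For context on the flag dump above: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" combines with --kube-reserved (empty here) and the hard-eviction threshold to produce the node's allocatable resources, which is what --enforce-node-allocatable="[pods]" caps pod usage against. Below is a worked sketch of the memory arithmetic, using the MemoryCapacity value from the Machine record logged further down; the eviction threshold is an assumed placeholder, since --eviction-hard is empty in the dump and the effective value comes from the config file, which this log does not show.

// Sketch: allocatable = capacity - kube-reserved - system-reserved - hard eviction.
package main

import "fmt"

const (
	mi = 1024 * 1024

	capacityBytes  = 33654128640 // MemoryCapacity from the Machine record below
	systemReserved = 350 * mi    // --system-reserved memory=350Mi
	kubeReserved   = 0           // --kube-reserved is empty in the flag dump
	evictionHard   = 100 * mi    // assumed placeholder; not present in this log
)

func main() {
	allocatable := capacityBytes - systemReserved - kubeReserved - evictionHard
	fmt.Printf("allocatable memory: %d bytes (~%.2f GiB)\n",
		allocatable, float64(allocatable)/(1024*1024*1024))
}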
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533547 4953 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533556 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533564 4953 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533573 4953 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533583 4953 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533591 4953 feature_gate.go:330] unrecognized feature gate: Example Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533599 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533635 4953 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533646 4953 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533655 4953 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533663 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533671 4953 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533679 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533688 4953 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533695 4953 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533704 4953 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533712 4953 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533722 4953 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533731 4953 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533741 4953 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533775 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533785 4953 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533794 4953 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533802 4953 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533810 4953 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533819 4953 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533828 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.533836 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.533859 4953 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.551004 4953 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.551060 4953 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551250 4953 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551269 4953 feature_gate.go:330] unrecognized feature gate: Example
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551282 4953 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551295 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551305 4953 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551316 4953 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551329 4953 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
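As the log shows, the repeated "unrecognized feature gate" floods are not fatal: the kubelet is handed a cluster-wide gate list that includes names it does not ship with, logs each unknown name at warning level (feature_gate.go:330), and folds the recognized ones into the effective set printed at feature_gate.go:386. Below is a minimal sketch of that tolerant merge, using a handful of gate names sampled from this log; the real parser lives in the component, and this loop only illustrates the observed behavior.

// Sketch: apply a cluster-wide gate list, warning on (and skipping) unknown names.
package main

import "fmt"

func main() {
	known := map[string]bool{ // gates this binary ships with (default values)
		"ValidatingAdmissionPolicy": false,
		"KMSv1":                     false,
		"CloudDualStackNodeIPs":     false,
	}
	desired := map[string]bool{ // cluster-wide list; includes names unknown here
		"ValidatingAdmissionPolicy": true,
		"KMSv1":                     true,
		"GatewayAPI":                true, // unknown to the kubelet
		"RouteAdvertisements":       false,
	}
	for name, enabled := range desired {
		if _, ok := known[name]; !ok {
			fmt.Printf("W unrecognized feature gate: %s\n", name) // warn and skip, not fatal
			continue
		}
		known[name] = enabled
	}
	fmt.Printf("feature gates: %v\n", known) // the effective set, as at feature_gate.go:386
}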
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551342 4953 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551353 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551363 4953 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551374 4953 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551384 4953 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551393 4953 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551403 4953 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551413 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551424 4953 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551434 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551444 4953 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551453 4953 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551464 4953 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551473 4953 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551484 4953 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551495 4953 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551505 4953 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551515 4953 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551524 4953 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551534 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551544 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551553 4953 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551563 4953 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551573 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551586 4953 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551633 4953 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551647 4953 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551660 4953 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551672 4953 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551682 4953 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551693 4953 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551702 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551713 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551723 4953 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551733 4953 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551743 4953 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551753 4953 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551763 4953 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551774 4953 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551783 4953 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551793 4953 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551804 4953 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551814 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551823 4953 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551833 4953 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551842 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551853 4953 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551862 4953 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551872 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551885 4953 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551897 4953 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551907 4953 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551919 4953 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551933 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551945 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551956 4953 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551966 4953 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551976 4953 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551986 4953 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.551996 4953 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552006 4953 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552015 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552026 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552037 4953 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.552054 4953 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552388 4953 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552409 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552421 4953 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552433 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552443 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552455 4953 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552467 4953 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552479 4953 
feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552490 4953 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552500 4953 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552514 4953 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552528 4953 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552541 4953 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552551 4953 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552563 4953 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552575 4953 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552586 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552598 4953 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552640 4953 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552651 4953 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552663 4953 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552675 4953 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552687 4953 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552699 4953 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552710 4953 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552720 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552729 4953 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552740 4953 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552750 4953 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552760 4953 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552770 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552780 4953 feature_gate.go:330] unrecognized feature gate: Example Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552790 4953 feature_gate.go:330] unrecognized 
feature gate: MinimumKubeletVersion Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552801 4953 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552811 4953 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552821 4953 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552830 4953 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552840 4953 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552851 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552862 4953 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552872 4953 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552882 4953 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552892 4953 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552905 4953 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552916 4953 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552926 4953 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552936 4953 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552946 4953 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552956 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552965 4953 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552975 4953 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552985 4953 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.552995 4953 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553007 4953 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553210 4953 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553221 4953 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553231 4953 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553241 4953 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 11 02:46:29 crc 
kubenswrapper[4953]: W1011 02:46:29.553250 4953 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553260 4953 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553270 4953 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553281 4953 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553289 4953 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553297 4953 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553305 4953 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553313 4953 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553322 4953 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553335 4953 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553347 4953 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553359 4953 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.553370 4953 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.553385 4953 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.553719 4953 server.go:940] "Client rotation is on, will bootstrap in background" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.561034 4953 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.562016 4953 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.563765 4953 server.go:997] "Starting client certificate rotation"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.563816 4953 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.564046 4953 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-16 07:06:44.440568353 +0000 UTC
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.564179 4953 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 868h20m14.876395201s for next certificate rotation
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.592846 4953 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.596960 4953 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.624995 4953 log.go:25] "Validated CRI v1 runtime API"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.664640 4953 log.go:25] "Validated CRI v1 image API"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.666986 4953 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.675799 4953 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-11-02-39-17-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.675843 4953 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.691084 4953 manager.go:217] Machine: {Timestamp:2025-10-11 02:46:29.688230267 +0000 UTC m=+0.621317931 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:af776600-3675-4e95-bb2d-3199a948d066 BootID:0371f094-ffaa-4075-95ad-f84ddb50698e Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:55:c3:4d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:55:c3:4d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:f8:62:f1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:69:f5:92 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ae:c0:51 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:39:5e:fc Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:70:4d:53 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:8e:de:cc:84:b9:72 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:12:8c:88:be:8e:ab Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.691339 4953 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.691462 4953 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.695595 4953 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.696024 4953 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.696066 4953 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.696355 4953 topology_manager.go:138] "Creating topology manager with none policy"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.696371 4953 container_manager_linux.go:303] "Creating device plugin manager"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.697049 4953 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.697088 4953 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
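The NodeConfig logged above carries HardEvictionThresholds expressed either as an absolute quantity (memory.available < 100Mi) or as a percentage of capacity (nodefs.available < 10%). A sketch of how such a threshold can be evaluated; the types and function are illustrative stand-ins, not the kubelet's eviction API:

	package main

	import "fmt"

	// Threshold mirrors the shape logged above: an absolute quantity in bytes,
	// or a fraction of capacity. Illustrative only.
	type Threshold struct {
		Signal     string
		Quantity   int64   // absolute bytes; 0 when Percentage is used
		Percentage float64 // fraction of capacity; 0 when Quantity is used
	}

	// crossed reports whether available has fallen below the threshold.
	func crossed(t Threshold, available, capacity int64) bool {
		limit := t.Quantity
		if t.Percentage > 0 {
			limit = int64(t.Percentage * float64(capacity))
		}
		return available < limit
	}

	func main() {
		memAvail := Threshold{Signal: "memory.available", Quantity: 100 << 20} // 100Mi
		nodefs := Threshold{Signal: "nodefs.available", Percentage: 0.1}       // 10%
		fmt.Println(crossed(memAvail, 64<<20, 32<<30)) // true: only 64Mi left
		fmt.Println(crossed(nodefs, 20<<30, 85<<30))   // false: ~23% free
	}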
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.698141 4953 state_mem.go:36] "Initialized new in-memory state store" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.698274 4953 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.701449 4953 kubelet.go:418] "Attempting to sync node with API server" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.701480 4953 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.701508 4953 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.701522 4953 kubelet.go:324] "Adding apiserver pod source" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.701538 4953 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.707175 4953 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.708023 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.708051 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.708207 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.708113 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.709013 4953 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.711239 4953 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713392 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713440 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713457 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713473 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713498 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713513 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713529 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713555 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713573 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713653 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713681 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.713714 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.714994 4953 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.715911 4953 server.go:1280] "Started kubelet"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.717221 4953 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.718930 4953 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.719058 4953 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 01:00:13.901998641 +0000 UTC
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.719330 4953 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1438h13m44.182681529s for next certificate rotation
Oct 11 02:46:29 crc systemd[1]: Started Kubernetes Kubelet.
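The burst of plugins.go:603 entries above is the kubelet probing its in-tree volume plugins into a name-indexed registry (with kubernetes.io/csi loaded last for everything out-of-tree). A minimal sketch of such a registry; the interface and names are illustrative, not the kubelet's actual volume.VolumePlugin API:

	package main

	import (
		"errors"
		"fmt"
	)

	// VolumePlugin is a stand-in for the kubelet's plugin interface;
	// only the name is modeled here.
	type VolumePlugin interface{ Name() string }

	type plugin string

	func (p plugin) Name() string { return string(p) }

	// registry maps plugin name -> plugin, matching the lookup the
	// "Loaded volume plugin" entries above suggest.
	type registry map[string]VolumePlugin

	func (r registry) load(p VolumePlugin) {
		r[p.Name()] = p
		fmt.Printf("Loaded volume plugin %q\n", p.Name())
	}

	func (r registry) find(name string) (VolumePlugin, error) {
		if p, ok := r[name]; ok {
			return p, nil
		}
		return nil, errors.New("no volume plugin matched: " + name)
	}

	func main() {
		r := registry{}
		for _, n := range []string{"kubernetes.io/empty-dir", "kubernetes.io/secret", "kubernetes.io/csi"} {
			r.load(plugin(n))
		}
		if _, err := r.find("kubernetes.io/rbd"); err != nil {
			fmt.Println(err)
		}
	}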
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.720893 4953 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.720908 4953 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.721638 4953 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.721874 4953 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.721892 4953 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.720894 4953 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.722002 4953 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.722338 4953 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.724863 4953 factory.go:153] Registering CRI-O factory
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725110 4953 factory.go:221] Registration of the crio container factory successfully
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725418 4953 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.725815 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="200ms"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725841 4953 factory.go:55] Registering systemd factory
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725874 4953 factory.go:221] Registration of the systemd container factory successfully
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725930 4953 factory.go:103] Registering Raw factory
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.725951 4953 manager.go:1196] Started watching for new ooms in manager
Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.725586 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.726652 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.725895 4953 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186d4fd14c22a606 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-11 02:46:29.715830278 +0000 UTC m=+0.648917962,LastTimestamp:2025-10-11 02:46:29.715830278 +0000 UTC m=+0.648917962,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.727631 4953 manager.go:319] Starting recovery of all containers
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.740597 4953 server.go:460] "Adding debug handlers to kubelet server"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748687 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748764 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748785 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748801 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748814 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748826 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748841 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748854 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748885 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748898 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748913 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748927 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748942 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748952 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748971 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.748987 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749002 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749020 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749036 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" 
volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749053 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749067 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749085 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749133 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749146 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749163 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749204 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749222 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749238 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749252 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749267 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749284 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749301 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749319 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749338 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749355 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749375 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749391 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749441 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749462 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749478 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749495 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749512 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749529 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749546 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749563 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749582 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749618 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749639 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749654 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749668 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749682 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749702 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749719 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749741 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749767 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749783 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749799 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749813 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749829 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749845 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749861 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749877 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749893 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749908 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749922 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749961 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.749977 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750045 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750064 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750079 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750093 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750109 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750127 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750143 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750160 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750176 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750191 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750208 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750230 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750247 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750264 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750282 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750296 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750311 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750356 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750376 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750424 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750439 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750454 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750466 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750480 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750492 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750506 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750520 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750536 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750555 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.750575 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752440 4953 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752473 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752486 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752505 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752521 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752535 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752551 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752647 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752666 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752679 4953 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752692 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752710 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752727 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752744 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752759 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752774 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752791 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752808 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752822 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752838 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752864 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752909 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752924 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752939 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752954 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752970 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752984 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.752999 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753015 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753032 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753065 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753083 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753099 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753114 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753129 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753144 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753160 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753175 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753189 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753206 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753220 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753290 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753306 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753325 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753337 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753380 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753396 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753409 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753424 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753437 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753451 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753465 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753480 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753517 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753533 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753547 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753560 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753573 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753588 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753620 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753638 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753655 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753669 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753683 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753696 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753709 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753722 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753736 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753749 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753765 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753781 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753797 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753810 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753826 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753839 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753853 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753866 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753883 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753896 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753951 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753966 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.753980 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754033 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754047 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754060 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754074 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754085 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" 
volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754097 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754108 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754122 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754134 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754145 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754158 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754195 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754208 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754221 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754234 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754246 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754258 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754270 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754284 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754335 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754348 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754360 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754373 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754385 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754401 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754416 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754430 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754473 4953 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754485 4953 reconstruct.go:97] "Volume reconstruction finished" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.754499 4953 reconciler.go:26] "Reconciler: start to sync state" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.770437 4953 manager.go:324] Recovery completed Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.786389 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.787748 4953 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.789542 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.789692 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.789770 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.791299 4953 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.791334 4953 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.791363 4953 state_mem.go:36] "Initialized new in-memory state store" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.794020 4953 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.794090 4953 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.794136 4953 kubelet.go:2335] "Starting kubelet main sync loop" Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.794294 4953 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 11 02:46:29 crc kubenswrapper[4953]: W1011 02:46:29.797784 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.797874 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.808986 4953 policy_none.go:49] "None policy: Start" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.810571 4953 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.810677 4953 state_mem.go:35] "Initializing new in-memory state store" Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.822453 4953 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.875078 4953 manager.go:334] "Starting Device Plugin manager" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.875165 4953 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.875190 4953 server.go:79] "Starting device plugin registration server" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.875952 4953 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.876030 4953 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.876411 4953 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.876543 4953 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.876564 4953 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.888112 4953 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.894367 4953 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 11 02:46:29 crc kubenswrapper[4953]: 
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.895902 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.895954 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.895966 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.896154 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.896313 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.896388 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897447 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897473 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897484 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897670 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897834 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.897880 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898180 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898217 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898228 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898753 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898787 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898823 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898835 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898914 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.898941 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.899302 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.900372 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.900468 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.900851 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.900875 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.900886 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.901023 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.901180 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.901216 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903714 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903739 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903750 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903929 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.903960 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.908779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.908820 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.908833 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.909082 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.909118 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.910450 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.910512 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.910529 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.927505 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="400ms"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.957074 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.957119 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.957148 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.957234 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.957418 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958343 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958485 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958595 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958724 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958855 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.958946 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.959037 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.959147 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.959247 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.959336 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.976324 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.978130 4953 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.978200 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.978224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:29 crc kubenswrapper[4953]: I1011 02:46:29.978271 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:29 crc kubenswrapper[4953]: E1011 02:46:29.978958 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065711 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065784 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065831 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065936 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065969 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.065876 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066012 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066161 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066230 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066291 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066338 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066381 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066423 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066439 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066293 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066544 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066633 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066511 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066696 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066727 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066775 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066881 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066885 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066973 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066893 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.066991 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.067095 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.067134 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.067164 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.067293 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.179644 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.182111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.182199 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.182220 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.182375 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.183566 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.228805 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.237224 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.252217 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.270994 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.277494 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.292394 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-77b068ccb06402292308ba5712e332aaf75f68073c3373527b42b6888d16ea7b WatchSource:0}: Error finding container 77b068ccb06402292308ba5712e332aaf75f68073c3373527b42b6888d16ea7b: Status 404 returned error can't find the container with id 77b068ccb06402292308ba5712e332aaf75f68073c3373527b42b6888d16ea7b Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.293456 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-f188dd64abdedccb236ec2e1920632117b7df5a28f04d44188c3b204f28e01c8 WatchSource:0}: Error finding container f188dd64abdedccb236ec2e1920632117b7df5a28f04d44188c3b204f28e01c8: Status 404 returned error can't find the container with id f188dd64abdedccb236ec2e1920632117b7df5a28f04d44188c3b204f28e01c8 Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.298211 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-701ab76a5ed61afa908865088b547d1e187e4748f3e563616c4e436bb1eb25bb WatchSource:0}: Error finding container 701ab76a5ed61afa908865088b547d1e187e4748f3e563616c4e436bb1eb25bb: Status 404 returned error can't find the container with id 701ab76a5ed61afa908865088b547d1e187e4748f3e563616c4e436bb1eb25bb Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.307786 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7099b4ee1798f24f528fb1aed38031e80fbd431556a9b58831c897f2ee0fe78d WatchSource:0}: Error finding container 7099b4ee1798f24f528fb1aed38031e80fbd431556a9b58831c897f2ee0fe78d: Status 404 returned error can't find the container with id 7099b4ee1798f24f528fb1aed38031e80fbd431556a9b58831c897f2ee0fe78d Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.311257 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-7a1b2fedcbd4d713425c5bec14a0ee89940dda81db73635d62a776f2e56ac8e4 WatchSource:0}: Error finding container 7a1b2fedcbd4d713425c5bec14a0ee89940dda81db73635d62a776f2e56ac8e4: Status 404 returned error can't find the container with id 7a1b2fedcbd4d713425c5bec14a0ee89940dda81db73635d62a776f2e56ac8e4 Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.328911 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="800ms" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.584714 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.587106 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.587188 4953 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.587208 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.587256 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.588069 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.718977 4953 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.742178 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.742313 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.790388 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.790522 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.799518 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7099b4ee1798f24f528fb1aed38031e80fbd431556a9b58831c897f2ee0fe78d"} Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.801485 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"701ab76a5ed61afa908865088b547d1e187e4748f3e563616c4e436bb1eb25bb"} Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.803141 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f188dd64abdedccb236ec2e1920632117b7df5a28f04d44188c3b204f28e01c8"} Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.805361 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"77b068ccb06402292308ba5712e332aaf75f68073c3373527b42b6888d16ea7b"} Oct 11 02:46:30 crc kubenswrapper[4953]: I1011 02:46:30.806820 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7a1b2fedcbd4d713425c5bec14a0ee89940dda81db73635d62a776f2e56ac8e4"} Oct 11 02:46:30 crc kubenswrapper[4953]: W1011 02:46:30.959750 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:30 crc kubenswrapper[4953]: E1011 02:46:30.961080 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:31 crc kubenswrapper[4953]: W1011 02:46:31.103136 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:31 crc kubenswrapper[4953]: E1011 02:46:31.103280 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:31 crc kubenswrapper[4953]: E1011 02:46:31.129664 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="1.6s" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.388500 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.391772 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.391826 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.391841 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.391880 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:31 crc kubenswrapper[4953]: E1011 02:46:31.392250 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.719574 4953 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.814220 4953 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f" exitCode=0 Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.814352 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.815595 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.817671 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.817733 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.817754 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.818656 4953 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef" exitCode=0 Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.818750 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.818876 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.821059 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.821111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.821132 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.821142 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.823684 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.823730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.823884 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.824063 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.824178 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.824206 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.827975 4953 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d8c1067390eacec72c704201e6d7360929563a5d38dfdb8a3df658fca851513b" exitCode=0 Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.828028 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d8c1067390eacec72c704201e6d7360929563a5d38dfdb8a3df658fca851513b"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.828216 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.829672 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.829717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.829736 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.831838 4953 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7" exitCode=0 Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.831911 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7"} Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.831936 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.833286 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.833342 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:31 crc kubenswrapper[4953]: I1011 02:46:31.833363 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.718990 4953 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial 
tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:32 crc kubenswrapper[4953]: E1011 02:46:32.730812 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="3.2s" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.838559 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.838658 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.838682 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.838712 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.840176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.840213 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.840224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.844397 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.844506 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.845890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.845927 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.845937 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.848052 4953 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="55be49e9105dec9b9839f2f134ed0d7b14ac9ace6617105eddc895c7c6510ce9" exitCode=0 Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.848129 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"55be49e9105dec9b9839f2f134ed0d7b14ac9ace6617105eddc895c7c6510ce9"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.848257 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.849205 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.849231 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.849239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.852843 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"261344b70969642c0d021b5f2b786d710391c5114aa756651d5cf7a436514f58"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.852929 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.854172 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.854270 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.854291 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.862929 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.862985 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.863001 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052"} Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.863014 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d"} Oct 11 02:46:32 crc kubenswrapper[4953]: W1011 02:46:32.898406 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:32 crc kubenswrapper[4953]: E1011 02:46:32.898507 4953 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:32 crc kubenswrapper[4953]: W1011 02:46:32.982483 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 11 02:46:32 crc kubenswrapper[4953]: E1011 02:46:32.982592 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.993000 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.995038 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.995071 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.995084 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:32 crc kubenswrapper[4953]: I1011 02:46:32.995499 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:32 crc kubenswrapper[4953]: E1011 02:46:32.996682 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.873944 4953 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="19cf62b826fc413c13ff79028a60104d024ca8ea11733a43477d85ee075ef98b" exitCode=0 Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.874032 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"19cf62b826fc413c13ff79028a60104d024ca8ea11733a43477d85ee075ef98b"} Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.874823 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.876979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.877037 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.877060 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878717 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de"} Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878773 4953 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878800 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878846 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878911 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.878853 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.879779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.879845 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.879870 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881145 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881143 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881232 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881252 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881255 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881232 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881307 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881288 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:33 crc kubenswrapper[4953]: I1011 02:46:33.881412 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.887580 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8a5b1cccafe8c632f531b262b39729c1d29b50eb0d6d47b7b1b8f75d5394df9a"} Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.887682 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"528b45cfc0da22923048fe714c97e2f818c1a96349a3b4b9c9a70899f61d7b6b"} Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.887715 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a93a466ea3116760172a49f11022c6ed572510ebf0db0f79e064b5ad9ffce0bd"} Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.887754 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.887718 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.889100 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.889146 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:34 crc kubenswrapper[4953]: I1011 02:46:34.889159 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.380550 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.899669 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2fb94e2f183dfca216a32a9248d272f285250c02ff77277f42ef61b34e4a0462"} Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.899735 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.899744 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fe834d0b759baf04252e7b526777a80c802a94b170cfea601b70c421081fb771"} Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.900437 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.901124 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.901197 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.901218 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.901999 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.902054 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:35 crc kubenswrapper[4953]: I1011 02:46:35.902076 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.060222 4953 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.197145 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.198764 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.198842 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.198862 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.198914 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.903421 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.903553 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905593 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905678 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905597 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905804 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:36 crc kubenswrapper[4953]: I1011 02:46:36.905830 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.576530 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.576935 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.578838 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.578916 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.578943 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.906903 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.908519 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.908579 4953 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.908591 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.960474 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.960826 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.962810 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.962894 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:37 crc kubenswrapper[4953]: I1011 02:46:37.962917 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.096954 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.097297 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.099306 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.099367 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.099387 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.240144 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.270551 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.909974 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.911226 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.911315 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.911335 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:38 crc kubenswrapper[4953]: I1011 02:46:38.957006 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:39 crc kubenswrapper[4953]: E1011 02:46:39.888481 4953 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 11 02:46:39 crc kubenswrapper[4953]: I1011 
02:46:39.912292 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:39 crc kubenswrapper[4953]: I1011 02:46:39.913444 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:39 crc kubenswrapper[4953]: I1011 02:46:39.913518 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:39 crc kubenswrapper[4953]: I1011 02:46:39.913544 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.380143 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.380553 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.382365 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.382420 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.382438 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.915488 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.917570 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.917735 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.917777 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:40 crc kubenswrapper[4953]: I1011 02:46:40.922861 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.918816 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.920299 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.920396 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.920423 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.957636 4953 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.957758 4953 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 02:46:41 crc kubenswrapper[4953]: I1011 02:46:41.976816 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:42 crc kubenswrapper[4953]: I1011 02:46:42.922889 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:42 crc kubenswrapper[4953]: I1011 02:46:42.924278 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:42 crc kubenswrapper[4953]: I1011 02:46:42.924347 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:42 crc kubenswrapper[4953]: I1011 02:46:42.924367 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:43 crc kubenswrapper[4953]: W1011 02:46:43.686537 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 11 02:46:43 crc kubenswrapper[4953]: I1011 02:46:43.686737 4953 trace.go:236] Trace[127483706]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 02:46:33.685) (total time: 10001ms): Oct 11 02:46:43 crc kubenswrapper[4953]: Trace[127483706]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (02:46:43.686) Oct 11 02:46:43 crc kubenswrapper[4953]: Trace[127483706]: [10.001387356s] [10.001387356s] END Oct 11 02:46:43 crc kubenswrapper[4953]: E1011 02:46:43.686779 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 11 02:46:43 crc kubenswrapper[4953]: I1011 02:46:43.719832 4953 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 11 02:46:44 crc kubenswrapper[4953]: W1011 02:46:44.284859 4953 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 11 02:46:44 crc kubenswrapper[4953]: I1011 02:46:44.284998 4953 trace.go:236] Trace[1477442323]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 02:46:34.282) (total time: 10002ms): Oct 11 02:46:44 crc kubenswrapper[4953]: Trace[1477442323]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (02:46:44.284) Oct 11 02:46:44 crc kubenswrapper[4953]: 
Trace[1477442323]: [10.002075925s] [10.002075925s] END
Oct 11 02:46:44 crc kubenswrapper[4953]: E1011 02:46:44.285034 4953 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Oct 11 02:46:44 crc kubenswrapper[4953]: I1011 02:46:44.544240 4953 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 11 02:46:44 crc kubenswrapper[4953]: I1011 02:46:44.544357 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 11 02:46:44 crc kubenswrapper[4953]: I1011 02:46:44.548961 4953 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 11 02:46:44 crc kubenswrapper[4953]: I1011 02:46:44.549034 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 11 02:46:45 crc kubenswrapper[4953]: I1011 02:46:45.390575 4953 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]log ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]etcd ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-api-request-count-filter ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-startkubeinformers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-apiserver-admission-initializer ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/generic-apiserver-start-informers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/priority-and-fairness-config-consumer ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/priority-and-fairness-filter ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/storage-object-count-tracker-hook ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-apiextensions-informers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-apiextensions-controllers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/crd-informer-synced ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-system-namespaces-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-cluster-authentication-info-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-legacy-token-tracking-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-service-ip-repair-controllers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Oct 11 02:46:45 crc kubenswrapper[4953]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/priority-and-fairness-config-producer ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/bootstrap-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/start-kube-aggregator-informers ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-status-local-available-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-status-remote-available-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-registration-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-wait-for-first-sync ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-discovery-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/kube-apiserver-autoregistration ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]autoregister-completion ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-openapi-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: [+]poststarthook/apiservice-openapiv3-controller ok
Oct 11 02:46:45 crc kubenswrapper[4953]: livez check failed
Oct 11 02:46:45 crc kubenswrapper[4953]: I1011 02:46:45.390734 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 11 02:46:47 crc kubenswrapper[4953]: I1011 02:46:47.134949 4953 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.137683 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.137920 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.139456 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.139512 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.139531 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.153309 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.940409 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.947078 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.947185 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:46:48 crc kubenswrapper[4953]: I1011 02:46:48.947214 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.178891 4953 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.530795 4953 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.532753 4953 trace.go:236] Trace[127377226]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 02:46:37.597) (total time: 11935ms):
Oct 11 02:46:49 crc kubenswrapper[4953]: Trace[127377226]: ---"Objects listed" error: 11935ms (02:46:49.532)
Oct 11 02:46:49 crc kubenswrapper[4953]: Trace[127377226]: [11.935127113s] [11.935127113s] END
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.532784 4953 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.533900 4953 trace.go:236] Trace[1400472916]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Oct-2025 02:46:37.931) (total time: 11601ms):
Oct 11 02:46:49 crc kubenswrapper[4953]: Trace[1400472916]: ---"Objects listed" error: 11601ms (02:46:49.533)
Oct 11 02:46:49 crc kubenswrapper[4953]: Trace[1400472916]: [11.601872607s] [11.601872607s] END
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.533921 4953 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.537171 4953 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.537936 4953 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.709798 4953 apiserver.go:52] "Watching apiserver"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.715167 4953 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.715654 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.716280 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.716788 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.716903 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.716921 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.717025 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.717067 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.717157 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.717219 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.717453 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.719226 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.719584 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.719703 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.720038 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.720069 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.720811 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.720906 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.721109 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.722852 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.723362 4953 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739116 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739185 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739235 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739275 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739370 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739405 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739437 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739472 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739506 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739582 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739642 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739681 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739718 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739752 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739788 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.739820 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.740888 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.741974 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.742312 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.742596 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.742913 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.743004 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.743221 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.743304 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.743350 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744507 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744666 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744725 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744773 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744772 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744805 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.744992 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745069 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745138 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745206 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745555 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745115 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745272 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745393 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745179 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745759 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745923 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745828 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.745987 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746042 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746221 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746340 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746525 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746587 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746811 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746922 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747005 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747147 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747470 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747539 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747597 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748176 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748685 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748748 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748789 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748836 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748881 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748927 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748964 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749007 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749058 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749102 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749140 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749182 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749225 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749288 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749916 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749982 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.746328 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747104 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747324 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.747693 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748052 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.748992 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749357 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749800 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.749901 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.750897 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.750984 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.751837 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.751974 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.752027 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.753003 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.753093 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.754570 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:46:50.250011714 +0000 UTC m=+21.183099388 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.754725 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.754813 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.754875 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.754927 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.754984 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755005 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755032 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755072 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755109 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755149 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755185 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755228 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755279 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755449 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755499 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755534 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755554 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755569 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755655 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755706 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755824 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755891 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755931 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755942 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.755972 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756009 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756068 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756104 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756140 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756173 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756215 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756231 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756259 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756298 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756335 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756371 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756408 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756441 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756859 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756971 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757021 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757058 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757133 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757180 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757215 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757249 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757284 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757316 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757482 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757541 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757580 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757641 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757701 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757749 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757893 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758129 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758167 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758201 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758242 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758280 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758683 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758773 4953 reconciler_common.go:159]
"operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758812 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759442 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759535 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759580 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759653 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759696 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759780 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759826 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759870 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759911 4953 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759957 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760007 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760085 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760171 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760297 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760358 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760441 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760477 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760517 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760555 4953 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760588 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760658 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760763 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760795 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760829 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760870 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760906 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760943 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760978 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761012 4953 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761046 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761079 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761110 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761146 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761195 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761229 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761262 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761302 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: 
\"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761379 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761436 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761476 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761514 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761553 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761589 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761650 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761685 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761719 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761752 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod 
\"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761786 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761809 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761841 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761865 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761892 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761918 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761944 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761970 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761997 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762027 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762050 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762074 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762098 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762130 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762169 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762206 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762232 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762256 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762279 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762303 4953 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762328 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762356 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762386 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.756964 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762420 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762449 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762473 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762475 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762500 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757225 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757855 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.757980 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762531 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762559 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762586 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762631 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762692 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762731 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762768 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762797 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762830 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762864 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762899 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762941 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762977 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") 
" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763020 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763067 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763105 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763146 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763186 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763310 4953 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763326 4953 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763345 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763368 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763390 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc 
kubenswrapper[4953]: I1011 02:46:49.763411 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763431 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763449 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763466 4953 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763485 4953 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763505 4953 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763524 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763544 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763563 4953 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763585 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763628 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763649 4953 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763669 4953 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763690 4953 
reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763709 4953 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763729 4953 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763746 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763765 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763783 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763800 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763819 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763838 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763856 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763876 4953 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763897 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763917 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763936 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" 
(UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763955 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763977 4953 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763997 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764016 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764036 4953 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764057 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764079 4953 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764097 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764115 4953 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770336 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.776072 4953 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.779075 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.781189 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.783237 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758158 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758207 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758216 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758327 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758544 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.758912 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759040 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759138 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759664 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.759809 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760201 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760801 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760848 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761193 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.760896 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.761884 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762386 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762550 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762945 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.762722 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763127 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763172 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763630 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.763661 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764121 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.764227 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764448 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.764938 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765052 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765113 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765506 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765542 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765546 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.765900 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.766266 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.766290 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.766631 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.767343 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.767367 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.767482 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.768101 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.768106 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.768293 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769024 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769100 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769203 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769354 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769506 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769705 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769746 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769778 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769790 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.769822 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770008 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770054 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770243 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770439 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770570 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770696 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.770716 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.771270 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.771286 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.771344 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.771374 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.771364 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.772396 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.772440 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.772539 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.773105 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.773277 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.774147 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.774507 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.774719 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.774738 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.774884 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.775225 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.776409 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.776219 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.775491 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777338 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777595 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777767 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777746 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777882 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777929 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.777971 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.778037 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.778345 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.778376 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.778647 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.778690 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.780857 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.780894 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.780961 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.781020 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.781129 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.781184 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.781244 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.778187 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.782507 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.793398 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-11 02:46:50.293362547 +0000 UTC m=+21.226450221 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.793766 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:50.293751256 +0000 UTC m=+21.226838940 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.796451 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.796703 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.796710 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.796917 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.797103 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.799106 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.799265 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.799642 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.799794 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.800201 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.800505 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.800759 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.801141 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.803358 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.803563 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.803795 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.804737 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.805106 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.806392 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.806971 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.807013 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.807036 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.807125 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:50.307101717 +0000 UTC m=+21.240189401 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.807169 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.808073 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.808097 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:49 crc kubenswrapper[4953]: E1011 02:46:49.808185 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:50.308155312 +0000 UTC m=+21.241242956 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.807894 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.807757 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.808810 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.809735 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.810022 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.810039 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.810819 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.811045 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.811241 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.811390 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.811794 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.817490 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.817520 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.817597 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.817848 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.818082 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.818129 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.818878 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.818893 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.819223 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.819270 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.819496 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.819873 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.820050 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.819225 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.820163 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.820196 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.821285 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.821501 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.822164 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.822750 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.822769 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.822797 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.823028 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.823062 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.823720 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.823875 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.824233 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.824380 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.824728 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.825494 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.828558 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.830204 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.830732 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.834260 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.835954 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.840912 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.840935 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.842416 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.849291 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.850380 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.852762 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.853680 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.855254 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.855569 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.858896 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.860244 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.862810 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.863385 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.864358 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.865049 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.865695 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866192 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866293 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866313 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866560 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: 
\"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866665 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866719 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866743 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866766 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866792 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866810 4953 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866830 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866839 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866850 4953 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866899 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866952 4953 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866965 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866978 4953 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" 
(UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.866989 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867000 4953 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867010 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867020 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867030 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867062 4953 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867082 4953 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867093 4953 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867105 4953 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867118 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867127 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867136 4953 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867146 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867156 4953 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867166 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867177 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867188 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867196 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867207 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867217 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867364 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867384 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867394 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867405 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867419 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867463 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867474 4953 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867499 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867484 4953 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867533 4953 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867542 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867551 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867573 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867583 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867593 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867621 4953 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867630 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867639 4953 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867652 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: 
\"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867664 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867676 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867686 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867695 4953 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867704 4953 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867714 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867722 4953 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867730 4953 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867741 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867750 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867760 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867770 4953 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867779 4953 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867788 4953 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867823 4953 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867833 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867842 4953 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867852 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867864 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867873 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867883 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.867984 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868109 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868269 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868288 4953 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868300 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868312 4953 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868322 4953 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868333 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868345 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868354 4953 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868365 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868374 4953 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868385 4953 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868396 4953 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868406 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868417 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868427 4953 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868436 4953 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868447 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868457 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868466 4953 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868476 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868486 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868495 4953 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868505 4953 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868514 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868525 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868537 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868546 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868556 4953 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868566 4953 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868575 4953 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868584 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868593 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868621 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on 
node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868632 4953 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868642 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868652 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868661 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868670 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868682 4953 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.868693 4953 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.872436 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.872705 4953 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.872840 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.872964 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.873212 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.873948 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" 
path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874533 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874647 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874681 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874696 4953 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874710 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874724 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874741 4953 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874756 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874768 4953 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.874940 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875074 4953 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875096 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875116 
4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875130 4953 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875142 4953 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875153 4953 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875169 4953 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875180 4953 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875196 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875211 4953 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875221 4953 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875233 4953 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875247 4953 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875263 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875275 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 11 
02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875287 4953 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875298 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875312 4953 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875324 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875335 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875347 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875360 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875371 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875385 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875403 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875413 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875437 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.875924 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 11 02:46:49 crc 
kubenswrapper[4953]: I1011 02:46:49.878671 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.879807 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.881440 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.882777 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.884304 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.884886 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.886327 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.887837 4953 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.888160 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.890171 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.890722 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.892084 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.894000 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.894673 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.895591 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 11 02:46:49 crc 
kubenswrapper[4953]: I1011 02:46:49.896357 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.897547 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.898046 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.898311 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.899158 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.899837 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.901031 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.901577 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.902573 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.903163 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.904405 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.904937 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.906201 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.907352 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.908773 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.911169 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.912510 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.912704 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.930666 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.945053 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.945730 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.954823 4953 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de" exitCode=255 Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.954920 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de"} Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.960543 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.970237 4953 scope.go:117] "RemoveContainer" containerID="b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.970710 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.971742 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.976011 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.983147 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:49 crc kubenswrapper[4953]: I1011 02:46:49.995948 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.009542 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.026502 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.038685 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.039101 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 11 02:46:50 crc kubenswrapper[4953]: W1011 02:46:50.059895 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-e90f9a590bfcd7b3f802456399fc92398232ab03985fd3ac437967c56ab3cc45 WatchSource:0}: Error finding container e90f9a590bfcd7b3f802456399fc92398232ab03985fd3ac437967c56ab3cc45: Status 404 returned error can't find the container with id e90f9a590bfcd7b3f802456399fc92398232ab03985fd3ac437967c56ab3cc45 Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.103375 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 11 02:46:50 crc kubenswrapper[4953]: W1011 02:46:50.119016 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-9aa752007ad15415f6783672809c60c13c0cb1aaaffdc5edac6723534e8474b8 WatchSource:0}: Error finding container 9aa752007ad15415f6783672809c60c13c0cb1aaaffdc5edac6723534e8474b8: Status 404 returned error can't find the container with id 9aa752007ad15415f6783672809c60c13c0cb1aaaffdc5edac6723534e8474b8 Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.124162 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 11 02:46:50 crc kubenswrapper[4953]: W1011 02:46:50.142025 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-73cf8261921047a851646090b0590bc8bf26d29f0e8e4619662cfaf70be8f679 WatchSource:0}: Error finding container 73cf8261921047a851646090b0590bc8bf26d29f0e8e4619662cfaf70be8f679: Status 404 returned error can't find the container with id 73cf8261921047a851646090b0590bc8bf26d29f0e8e4619662cfaf70be8f679 Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.261065 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.269069 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.279287 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.280438 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.280655 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:46:51.280619787 +0000 UTC m=+22.213707441 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.282309 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.296674 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.311118 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.324453 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.336062 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.352414 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.366783 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.378950 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.382026 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.382096 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.382124 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: 
\"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.382175 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382239 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382262 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382277 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382283 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382328 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:51.382311054 +0000 UTC m=+22.315398688 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382402 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:51.382362975 +0000 UTC m=+22.315450619 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382455 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382487 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382531 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382550 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382589 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:51.382551539 +0000 UTC m=+22.315639333 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.382673 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:51.382651822 +0000 UTC m=+22.315739666 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.383709 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.395832 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.414106 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.429274 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.447044 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.461075 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.473938 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.492154 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.510563 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.526738 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.539694 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.550785 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.598731 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11
T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.613921 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.628739 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.638929 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.802806 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:50 crc kubenswrapper[4953]: E1011 02:46:50.802958 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.960771 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"73cf8261921047a851646090b0590bc8bf26d29f0e8e4619662cfaf70be8f679"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.962646 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.962722 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.962741 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9aa752007ad15415f6783672809c60c13c0cb1aaaffdc5edac6723534e8474b8"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.963890 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.963944 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e90f9a590bfcd7b3f802456399fc92398232ab03985fd3ac437967c56ab3cc45"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.966858 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.970468 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6"} Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.970845 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 02:46:50.982462 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:46:50 crc kubenswrapper[4953]: I1011 
02:46:50.996155 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:50Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.010369 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.032347 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.054877 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.070499 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.088971 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.114963 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.132516 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.149775 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.174764 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.198005 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.210795 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.228126 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.258491 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.291429 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.291712 4953 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.291669168 +0000 UTC m=+24.224756812 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.298586 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.325847 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.392879 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.392925 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.392947 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.392967 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393089 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393082 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393157 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.393141199 +0000 UTC m=+24.326228843 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393202 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.39317956 +0000 UTC m=+24.326267204 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393199 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393241 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393257 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393329 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.393309643 +0000 UTC m=+24.326397287 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393354 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393386 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393410 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.393451 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.393442716 +0000 UTC m=+24.326530350 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.749007 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-tshlv"] Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.749361 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.756315 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.756401 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.757697 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.775457 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.789540 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.795333 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.795419 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.795559 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:46:51 crc kubenswrapper[4953]: E1011 02:46:51.795719 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.799274 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.828729 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.847068 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.871933 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.896665 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cd73dbbf-9c04-4826-9bbd-56341abba133-hosts-file\") pod \"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.896725 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc9t9\" (UniqueName: \"kubernetes.io/projected/cd73dbbf-9c04-4826-9bbd-56341abba133-kube-api-access-qc9t9\") pod \"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.910650 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.929903 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.944803 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.968406 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:51Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.997510 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc9t9\" (UniqueName: \"kubernetes.io/projected/cd73dbbf-9c04-4826-9bbd-56341abba133-kube-api-access-qc9t9\") pod \"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.997589 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cd73dbbf-9c04-4826-9bbd-56341abba133-hosts-file\") pod \"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:51 crc kubenswrapper[4953]: I1011 02:46:51.997693 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/cd73dbbf-9c04-4826-9bbd-56341abba133-hosts-file\") pod \"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.020220 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc9t9\" (UniqueName: \"kubernetes.io/projected/cd73dbbf-9c04-4826-9bbd-56341abba133-kube-api-access-qc9t9\") pod 
\"node-resolver-tshlv\" (UID: \"cd73dbbf-9c04-4826-9bbd-56341abba133\") " pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.064155 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tshlv" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.084807 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd73dbbf_9c04_4826_9bbd_56341abba133.slice/crio-d05ad005d8826499c0248a8dfc1912055e105fca07924c96c4d2cd0da0edcfb8 WatchSource:0}: Error finding container d05ad005d8826499c0248a8dfc1912055e105fca07924c96c4d2cd0da0edcfb8: Status 404 returned error can't find the container with id d05ad005d8826499c0248a8dfc1912055e105fca07924c96c4d2cd0da0edcfb8 Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.152631 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-t8zfg"] Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.153097 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.155760 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-9jz9g"] Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.156013 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-fxswv"] Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.156681 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.157728 4953 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.157746 4953 reflector.go:561] object-"openshift-multus"/"cni-copy-resources": failed to list *v1.ConfigMap: configmaps "cni-copy-resources" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.157759 4953 reflector.go:561] object-"openshift-multus"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.157806 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"cni-copy-resources\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"cni-copy-resources\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.157832 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.157829 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.157772 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.157746 4953 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.158280 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.158715 4953 reflector.go:561] object-"openshift-multus"/"default-dockercfg-2q5b6": failed to list *v1.Secret: secrets "default-dockercfg-2q5b6" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.158782 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-dockercfg-2q5b6\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-2q5b6\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.159988 4953 reflector.go:561] object-"openshift-machine-config-operator"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160024 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace 
\"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.160080 4953 reflector.go:561] object-"openshift-machine-config-operator"/"proxy-tls": failed to list *v1.Secret: secrets "proxy-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160103 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"proxy-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"proxy-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.160310 4953 reflector.go:561] object-"openshift-multus"/"default-cni-sysctl-allowlist": failed to list *v1.ConfigMap: configmaps "default-cni-sysctl-allowlist" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160334 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"default-cni-sysctl-allowlist\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"default-cni-sysctl-allowlist\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.160379 4953 reflector.go:561] object-"openshift-machine-config-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160392 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.160414 4953 reflector.go:561] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-config-operator": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160444 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-config-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource 
\"configmaps\" in API group \"\" in the namespace \"openshift-machine-config-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.160541 4953 reflector.go:561] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": failed to list *v1.Secret: secrets "multus-ancillary-tools-dockercfg-vnmsz" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.160556 4953 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-ancillary-tools-dockercfg-vnmsz\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"multus-ancillary-tools-dockercfg-vnmsz\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.162520 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.180441 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.210865 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.228919 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.252205 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.289806 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301133 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301172 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-conf-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301199 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-system-cni-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301222 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-hostroot\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301240 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-binary-copy\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301285 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301312 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwtzr\" (UniqueName: \"kubernetes.io/projected/5a114089-658e-442c-b755-9ca9b127f368-kube-api-access-pwtzr\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301347 4953 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-etc-kubernetes\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301371 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-multus\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301396 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh2dx\" (UniqueName: \"kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301492 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301546 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-system-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301571 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-bin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301635 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-cni-binary-copy\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301654 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301671 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-cnibin\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " 
pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301687 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-cnibin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301703 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-kubelet\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301719 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-multus-certs\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301739 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-os-release\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301758 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blmpp\" (UniqueName: \"kubernetes.io/projected/e359a42a-4046-4856-8936-b570d11fb061-kube-api-access-blmpp\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-os-release\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301799 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a55d5e63-14a1-4d53-be84-21dce9f0c53d-rootfs\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301814 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301837 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-multus-daemon-config\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301864 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-netns\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301882 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-socket-dir-parent\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.301899 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-k8s-cni-cncf-io\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.312416 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.332766 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.353573 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.373326 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.389569 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403214 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403253 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-system-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403272 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-bin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403302 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-cni-binary-copy\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403323 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403346 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-cnibin\") pod 
\"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403370 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-cnibin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403399 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-kubelet\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403430 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-multus-certs\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403456 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-os-release\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403477 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-cnibin\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403560 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-cnibin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403563 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-kubelet\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403577 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-bin\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403650 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-multus-certs\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403663 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blmpp\" (UniqueName: \"kubernetes.io/projected/e359a42a-4046-4856-8936-b570d11fb061-kube-api-access-blmpp\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-os-release\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403777 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/a55d5e63-14a1-4d53-be84-21dce9f0c53d-rootfs\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403797 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-multus-daemon-config\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403858 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-netns\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403878 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-socket-dir-parent\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403896 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-k8s-cni-cncf-io\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403854 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-tuning-conf-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403947 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rootfs\" (UniqueName: \"kubernetes.io/host-path/a55d5e63-14a1-4d53-be84-21dce9f0c53d-rootfs\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403980 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-socket-dir-parent\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403999 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-netns\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403918 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404027 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-os-release\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404037 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-system-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404055 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-conf-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403971 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-os-release\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.403981 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-run-k8s-cni-cncf-io\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404100 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-cni-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 
02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404116 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-multus-conf-dir\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-system-cni-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404238 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e359a42a-4046-4856-8936-b570d11fb061-system-cni-dir\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404303 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-hostroot\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404328 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-binary-copy\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404353 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404378 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwtzr\" (UniqueName: \"kubernetes.io/projected/5a114089-658e-442c-b755-9ca9b127f368-kube-api-access-pwtzr\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-etc-kubernetes\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404434 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-hostroot\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404439 4953 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404516 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-etc-kubernetes\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404443 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-multus\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404471 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5a114089-658e-442c-b755-9ca9b127f368-host-var-lib-cni-multus\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.404636 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rh2dx\" (UniqueName: \"kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.418151 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.428832 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.457965 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.472427 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.488656 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.505619 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.519487 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.539926 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.558079 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.572024 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.581909 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7277g"] Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.582935 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.585631 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.585715 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.585950 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.586076 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.586330 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.586344 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.586336 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.595094 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.608368 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.621128 4953 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b2
3eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.634004 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.644529 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.660266 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.678224 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.696645 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707216 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707320 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707397 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707458 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707484 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707544 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707568 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707733 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707792 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707845 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707872 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.707977 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708011 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708048 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708113 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708156 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708180 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708201 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708265 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.708290 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmskm\" (UniqueName: \"kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.711650 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.724972 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.737822 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.761994 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha25
6:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.788026 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.794479 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:52 crc kubenswrapper[4953]: E1011 02:46:52.794585 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.802131 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:52Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809227 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809286 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809330 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809353 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809393 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809389 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809422 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809437 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809485 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809487 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809509 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809539 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809348 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809572 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809744 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809761 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmskm\" (UniqueName: \"kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809816 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809881 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809907 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809935 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809957 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809965 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.809982 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810009 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810019 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810048 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810073 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810082 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810010 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810050 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810167 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810211 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810261 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810326 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810406 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810483 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.810788 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.811114 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.813627 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.827078 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmskm\" (UniqueName: \"kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm\") pod \"ovnkube-node-7277g\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.895203 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:46:52 crc kubenswrapper[4953]: W1011 02:46:52.915851 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf390367a_136d_4992_a5a8_75d12ae2a94a.slice/crio-fea333d76f7d5ff44f08fa16588a22989c4784b3693020aa652fe16748b1fd50 WatchSource:0}: Error finding container fea333d76f7d5ff44f08fa16588a22989c4784b3693020aa652fe16748b1fd50: Status 404 returned error can't find the container with id fea333d76f7d5ff44f08fa16588a22989c4784b3693020aa652fe16748b1fd50 Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.981956 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"fea333d76f7d5ff44f08fa16588a22989c4784b3693020aa652fe16748b1fd50"} Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.983990 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tshlv" event={"ID":"cd73dbbf-9c04-4826-9bbd-56341abba133","Type":"ContainerStarted","Data":"43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472"} Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.984016 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tshlv" event={"ID":"cd73dbbf-9c04-4826-9bbd-56341abba133","Type":"ContainerStarted","Data":"d05ad005d8826499c0248a8dfc1912055e105fca07924c96c4d2cd0da0edcfb8"} Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.991159 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.994949 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-cni-binary-copy\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:52 crc kubenswrapper[4953]: I1011 02:46:52.995487 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-binary-copy\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.006984 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.020573 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.025354 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e359a42a-4046-4856-8936-b570d11fb061-cni-sysctl-allowlist\") pod 
\"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.025516 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{
\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.039379 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.054517 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.075890 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.078820 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.083251 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.084855 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/5a114089-658e-442c-b755-9ca9b127f368-multus-daemon-config\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.097869 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.112838 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.131467 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.162986 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.202860 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.253124 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.253199 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.301705 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.315359 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.315584 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:46:57.315551521 +0000 UTC m=+28.248639185 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.345185 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:53Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.404487 4953 secret.go:188] Couldn't get secret openshift-machine-config-operator/proxy-tls: failed to sync secret cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.404644 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls podName:a55d5e63-14a1-4d53-be84-21dce9f0c53d nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.904575892 +0000 UTC m=+24.837663567 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls") pod "machine-config-daemon-9jz9g" (UID: "a55d5e63-14a1-4d53-be84-21dce9f0c53d") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.404757 4953 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.404815 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config podName:a55d5e63-14a1-4d53-be84-21dce9f0c53d nodeName:}" failed. No retries permitted until 2025-10-11 02:46:53.904800478 +0000 UTC m=+24.837888152 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "mcd-auth-proxy-config" (UniqueName: "kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config") pod "machine-config-daemon-9jz9g" (UID: "a55d5e63-14a1-4d53-be84-21dce9f0c53d") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.416863 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.416951 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.416947 4953 projected.go:288] Couldn't get configMap openshift-machine-config-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.416996 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.417049 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417113 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417166 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417196 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417448 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417462 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417216 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417501 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417509 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417809 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:57.41735845 +0000 UTC m=+28.350446304 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417845 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:57.417837011 +0000 UTC m=+28.350924655 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417860 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:57.417854652 +0000 UTC m=+28.350942296 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.417874 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:46:57.417866362 +0000 UTC m=+28.350954006 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.460858 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.501308 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.512193 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.553188 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.650646 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.657651 4953 projected.go:194] Error preparing data for projected volume kube-api-access-rh2dx for pod openshift-machine-config-operator/machine-config-daemon-9jz9g: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.657746 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx podName:a55d5e63-14a1-4d53-be84-21dce9f0c53d nodeName:}" failed. No retries permitted until 2025-10-11 02:46:54.157723922 +0000 UTC m=+25.090811566 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-rh2dx" (UniqueName: "kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx") pod "machine-config-daemon-9jz9g" (UID: "a55d5e63-14a1-4d53-be84-21dce9f0c53d") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.725719 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.732341 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwtzr\" (UniqueName: \"kubernetes.io/projected/5a114089-658e-442c-b755-9ca9b127f368-kube-api-access-pwtzr\") pod \"multus-t8zfg\" (UID: \"5a114089-658e-442c-b755-9ca9b127f368\") " pod="openshift-multus/multus-t8zfg" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.734405 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blmpp\" (UniqueName: \"kubernetes.io/projected/e359a42a-4046-4856-8936-b570d11fb061-kube-api-access-blmpp\") pod \"multus-additional-cni-plugins-fxswv\" (UID: \"e359a42a-4046-4856-8936-b570d11fb061\") " pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.794652 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.794937 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.795224 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:53 crc kubenswrapper[4953]: E1011 02:46:53.795812 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.924889 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.924998 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.925994 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a55d5e63-14a1-4d53-be84-21dce9f0c53d-mcd-auth-proxy-config\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.930459 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a55d5e63-14a1-4d53-be84-21dce9f0c53d-proxy-tls\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.968655 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-t8zfg" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.977736 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-fxswv" Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.989038 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97"} Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.998776 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" exitCode=0 Oct 11 02:46:53 crc kubenswrapper[4953]: I1011 02:46:53.998826 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} Oct 11 02:46:54 crc kubenswrapper[4953]: W1011 02:46:54.003722 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a114089_658e_442c_b755_9ca9b127f368.slice/crio-3d9256b0025950bd12e54f7c57210bbfa4fad933e772b58e9ac4088cfc959c45 WatchSource:0}: Error finding container 3d9256b0025950bd12e54f7c57210bbfa4fad933e772b58e9ac4088cfc959c45: Status 404 returned error can't find the container with id 3d9256b0025950bd12e54f7c57210bbfa4fad933e772b58e9ac4088cfc959c45 Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.009049 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.029525 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.047685 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.066037 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.082648 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.105301 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.125598 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.140675 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.160313 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.177511 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.191864 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.235512 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh2dx\" (UniqueName: \"kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.242040 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh2dx\" (UniqueName: \"kubernetes.io/projected/a55d5e63-14a1-4d53-be84-21dce9f0c53d-kube-api-access-rh2dx\") pod \"machine-config-daemon-9jz9g\" (UID: \"a55d5e63-14a1-4d53-be84-21dce9f0c53d\") " pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.246589 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.289060 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:46:54 crc kubenswrapper[4953]: W1011 02:46:54.306351 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda55d5e63_14a1_4d53_be84_21dce9f0c53d.slice/crio-5fa956b1923d600c47e413b8012e0d1cf16c307cf61974939fd0111bd429718c WatchSource:0}: Error finding container 5fa956b1923d600c47e413b8012e0d1cf16c307cf61974939fd0111bd429718c: Status 404 returned error can't find the container with id 5fa956b1923d600c47e413b8012e0d1cf16c307cf61974939fd0111bd429718c Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.322001 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.340467 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.357231 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.371220 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.383919 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.402960 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.422239 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.444001 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.460801 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.475645 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.489666 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-gp27l"] Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.490150 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.494029 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.494255 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.494481 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.494574 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.494671 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.512419 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.541566 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.588561 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.622360 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.641239 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c909e377-5fa6-4647-b368-0e5436d9e407-serviceca\") pod \"node-ca-gp27l\" (UID: 
\"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.641308 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c909e377-5fa6-4647-b368-0e5436d9e407-host\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.641337 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzkb5\" (UniqueName: \"kubernetes.io/projected/c909e377-5fa6-4647-b368-0e5436d9e407-kube-api-access-kzkb5\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.660619 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc 
kubenswrapper[4953]: I1011 02:46:54.702836 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\
":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready
\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.742311 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c909e377-5fa6-4647-b368-0e5436d9e407-host\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.742443 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzkb5\" (UniqueName: \"kubernetes.io/projected/c909e377-5fa6-4647-b368-0e5436d9e407-kube-api-access-kzkb5\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.742488 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c909e377-5fa6-4647-b368-0e5436d9e407-host\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.742515 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c909e377-5fa6-4647-b368-0e5436d9e407-serviceca\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.744221 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c909e377-5fa6-4647-b368-0e5436d9e407-serviceca\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.750632 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.770709 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzkb5\" (UniqueName: \"kubernetes.io/projected/c909e377-5fa6-4647-b368-0e5436d9e407-kube-api-access-kzkb5\") pod \"node-ca-gp27l\" (UID: \"c909e377-5fa6-4647-b368-0e5436d9e407\") " pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.794418 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:54 crc kubenswrapper[4953]: E1011 02:46:54.794586 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.803092 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.813077 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-gp27l" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.840762 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.879728 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.927653 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:54 crc kubenswrapper[4953]: I1011 02:46:54.962532 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:54Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.005948 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.010440 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.010490 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.010512 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"5fa956b1923d600c47e413b8012e0d1cf16c307cf61974939fd0111bd429718c"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.012869 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerStarted","Data":"ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.012930 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerStarted","Data":"3d9256b0025950bd12e54f7c57210bbfa4fad933e772b58e9ac4088cfc959c45"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018155 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018195 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018216 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018226 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.018235 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.020241 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" 
containerID="37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad" exitCode=0 Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.020278 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.020373 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerStarted","Data":"5748ab13c95a112cf87f0866dfe4ac3449f333794a3cb67dd1eaaba310087040"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.023004 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gp27l" event={"ID":"c909e377-5fa6-4647-b368-0e5436d9e407","Type":"ContainerStarted","Data":"85e8376d230d5d52d033a686f1e4f0886d661ac52553964c6a4e26be66aaf124"} Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.056144 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.087223 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.122078 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.167021 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.204865 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.242048 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.280237 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.323934 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.373317 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z 
is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.404415 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.444497 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.481734 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.528483 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.560539 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.603982 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.645522 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.682137 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.732701 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.794394 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:55 crc kubenswrapper[4953]: E1011 02:46:55.794574 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.794409 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:55 crc kubenswrapper[4953]: E1011 02:46:55.794786 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.938501 4953 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.941799 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.942039 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.942048 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.942152 4953 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.950256 4953 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.950678 4953 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.952830 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.952883 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.952897 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.952918 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.952932 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:55Z","lastTransitionTime":"2025-10-11T02:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:55 crc kubenswrapper[4953]: E1011 02:46:55.970710 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.975142 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.975206 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.975223 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.975246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.975256 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:55Z","lastTransitionTime":"2025-10-11T02:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:55 crc kubenswrapper[4953]: E1011 02:46:55.988131 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:55Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.993982 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.994031 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.994042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.994061 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:55 crc kubenswrapper[4953]: I1011 02:46:55.994079 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:55Z","lastTransitionTime":"2025-10-11T02:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: E1011 02:46:56.006578 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.010491 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.010533 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.010545 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.010565 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.010575 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: E1011 02:46:56.022618 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.027377 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.027436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.027449 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.027471 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.027491 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.028736 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-gp27l" event={"ID":"c909e377-5fa6-4647-b368-0e5436d9e407","Type":"ContainerStarted","Data":"bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.031803 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" containerID="d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af" exitCode=0 Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.031859 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.044360 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: E1011 02:46:56.045329 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: E1011 02:46:56.045535 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.047402 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.047453 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.047470 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.047494 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.047507 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.070631 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.095462 4953 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.111311 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.124439 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.146101 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.153549 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.153595 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.153625 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.153646 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.153657 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.168672 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z 
is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.182562 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.198981 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.215154 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.229824 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.242595 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.257266 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.257329 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.257342 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.257366 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.257381 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.284749 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.325143 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.360495 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.360529 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.360537 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.360553 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.360564 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.373684 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.406594 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.442146 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.463842 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.463885 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.463894 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.463910 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.463920 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.483100 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.526858 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.564745 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.566711 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.566751 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.566761 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.566784 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.566798 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.605957 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.643135 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.669521 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.669561 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.669572 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.669591 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.669618 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.682714 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.723317 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.770443 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z 
is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.771701 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.771733 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.771744 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.771761 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.771774 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.794828 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:56 crc kubenswrapper[4953]: E1011 02:46:56.794976 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.804215 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.844520 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.875141 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.875199 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.875216 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.875239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.875254 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.881185 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:56Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.978058 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.978110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.978122 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.978154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:56 crc kubenswrapper[4953]: I1011 02:46:56.978179 4953 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:56Z","lastTransitionTime":"2025-10-11T02:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.045699 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" containerID="4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e" exitCode=0 Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.045790 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.074544 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z 
is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.082546 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.082591 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.082681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.082706 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.082720 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.092253 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.111406 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.126671 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.142668 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.162394 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.178424 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.185371 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.185419 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.185431 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.185448 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.185460 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.200630 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.240813 4953 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.282710 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.288699 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.288742 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.288751 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.288768 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.288780 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.328548 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.361623 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.367838 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.368066 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:47:05.367999361 +0000 UTC m=+36.301087025 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.392317 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.392368 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.392381 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.392416 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.392430 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.408392 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.449043 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.469701 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.469782 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.469819 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.469852 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.469944 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470071 4953 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470094 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.469971 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470145 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470079 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:05.470050516 +0000 UTC m=+36.403138340 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470243 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:05.47021309 +0000 UTC m=+36.403300744 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470292 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:05.470276611 +0000 UTC m=+36.403364275 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470338 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470395 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470423 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.470534 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:05.470504757 +0000 UTC m=+36.403592431 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.497868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.497952 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.497980 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.498009 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.498029 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.602385 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.602467 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.602542 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.602576 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.602597 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.706857 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.706927 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.706945 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.706973 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.706994 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.794943 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.795038 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.795184 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:46:57 crc kubenswrapper[4953]: E1011 02:46:57.795404 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.813451 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.813514 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.813531 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.813560 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.813579 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.918084 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.918159 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.918179 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.918211 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:57 crc kubenswrapper[4953]: I1011 02:46:57.918244 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:57Z","lastTransitionTime":"2025-10-11T02:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.021975 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.022045 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.022067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.022101 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.022130 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.055641 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" containerID="598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1" exitCode=0 Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.055671 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.068788 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.085565 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.104644 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.118933 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.125449 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.125506 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.125526 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.125555 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.125576 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.142386 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.164711 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.213225 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.230241 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.230303 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.230317 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.230338 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.230352 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.231858 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.250733 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.274984 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.290680 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.302693 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.312951 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.326814 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.334347 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.334388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.334404 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.334427 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.334444 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.346544 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd
9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:58Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.437766 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.437847 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.437870 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.437928 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.437952 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.542033 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.542110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.542129 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.542158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.542179 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.645950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.646032 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.646049 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.646075 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.646097 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.749922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.750001 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.750025 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.750063 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.750088 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.794870 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:46:58 crc kubenswrapper[4953]: E1011 02:46:58.795087 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.853497 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.853575 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.853592 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.853651 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.853670 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.956485 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.956653 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.956679 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.956707 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:58 crc kubenswrapper[4953]: I1011 02:46:58.956727 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:58Z","lastTransitionTime":"2025-10-11T02:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.060856 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.060926 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.060945 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.060971 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.060991 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.084482 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" containerID="d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630" exitCode=0 Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.084555 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.109870 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.164158 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.164849 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.164949 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.164976 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.165015 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.165049 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.199275 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"
finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.227736 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.251919 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.269354 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.269408 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.269426 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.269454 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.269469 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.271966 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.283899 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.302430 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.316184 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.330493 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.353597 4953 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.373841 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.373884 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.373895 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.373985 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.374000 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.375292 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.395919 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.416446 4953 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.476463 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.476524 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.476543 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.476571 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.476591 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.578850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.578880 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.578888 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.578903 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.578958 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.681377 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.681452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.681473 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.681509 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.681530 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.785793 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.785975 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.785996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.786029 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.786048 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.795713 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:46:59 crc kubenswrapper[4953]: E1011 02:46:59.795947 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.796735 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:46:59 crc kubenswrapper[4953]: E1011 02:46:59.796902 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.825985 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4
ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.848596 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.872664 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.889519 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.889568 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.889585 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.889644 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.889664 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.900915 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.924207 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.943328 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.959846 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.984342 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:46:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.993548 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.993653 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.993674 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.993698 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:46:59 crc kubenswrapper[4953]: I1011 02:46:59.993717 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:46:59Z","lastTransitionTime":"2025-10-11T02:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.004484 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.022880 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.038872 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.051333 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.068341 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.088025 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.093891 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.094981 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.095052 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.097478 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.097506 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.097518 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.097534 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.097546 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.101481 4953 generic.go:334] "Generic (PLEG): container finished" podID="e359a42a-4046-4856-8936-b570d11fb061" containerID="9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289" exitCode=0 Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.101525 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerDied","Data":"9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.111530 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.127140 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.144732 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.163489 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.180056 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.192943 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.207049 4953 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.207126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.207154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.207191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.207219 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.220684 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.243785 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.269250 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.283051 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.297148 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.307351 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.311966 4953 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.312045 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.312077 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.312112 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.312138 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.312453 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.313308 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z
\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.332759 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.359879 4953 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.388972 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.403931 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.418025 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.418100 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.418119 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.418150 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.418174 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.423120 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.439734 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.454703 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.468576 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.490634 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a
07dabd660b77f896d4792f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.506113 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520722 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520913 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520948 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520960 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520977 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.520988 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.532857 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc 
kubenswrapper[4953]: I1011 02:47:00.549937 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"im
age\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\
\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.563954 4953 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b2
3eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.578176 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.592404 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for 
pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.624610 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.624655 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.624665 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.624690 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.624706 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.727963 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.728027 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.728040 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.728062 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.728075 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.795120 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:00 crc kubenswrapper[4953]: E1011 02:47:00.795375 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.832078 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.832147 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.832165 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.832193 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.832211 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.936154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.936209 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.936222 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.936243 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:00 crc kubenswrapper[4953]: I1011 02:47:00.936255 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:00Z","lastTransitionTime":"2025-10-11T02:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.040024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.040087 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.040105 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.040133 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.040153 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.111860 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" event={"ID":"e359a42a-4046-4856-8936-b570d11fb061","Type":"ContainerStarted","Data":"1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.111991 4953 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.134987 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.142782 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.142839 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.142859 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.142884 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.142903 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.159314 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.186208 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.231173 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.245953 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.246039 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.246063 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.246098 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.246122 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.255236 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.278575 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.299101 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.316520 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.352063 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.378081 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.379906 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.379952 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.379965 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.379985 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.379998 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.397464 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.417892 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.435296 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.452626 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:01Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.482808 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.482890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.482913 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.482942 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.482962 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.586251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.586310 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.586321 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.586340 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.586350 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.690388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.690441 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.690457 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.690484 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.690507 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799314 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799344 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799419 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799441 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799333 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799505 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: E1011 02:47:01.799551 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.799530 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:01 crc kubenswrapper[4953]: E1011 02:47:01.799790 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.903434 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.903524 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.903544 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.903577 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:01 crc kubenswrapper[4953]: I1011 02:47:01.903595 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:01Z","lastTransitionTime":"2025-10-11T02:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.014122 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.014182 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.014206 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.014240 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.014260 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.116502 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.116575 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.116595 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.116668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.116693 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.117516 4953 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.219649 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.219726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.219746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.219775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.219795 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.322836 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.322901 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.322920 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.322950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.322968 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.425997 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.426843 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.426962 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.427071 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.427172 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.530112 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.530470 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.530552 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.530650 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.530716 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.574881 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.591065 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.608244 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.630486 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.635345 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.635446 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.635478 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.635518 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.635548 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.646216 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.666664 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.694621 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.722676 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.738759 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.738818 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.738828 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.738848 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.738859 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.742986 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay
.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.766069 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.779516 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.794961 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:02 crc kubenswrapper[4953]: E1011 02:47:02.795164 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.800973 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.815877 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.831243 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.841101 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.841154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.841168 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.841186 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.841198 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.843087 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:02Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.944230 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.944277 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.944288 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.944309 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:02 crc kubenswrapper[4953]: I1011 02:47:02.944325 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:02Z","lastTransitionTime":"2025-10-11T02:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.047814 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.047877 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.047897 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.047923 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.047955 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.124240 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/0.log" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.127834 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14" exitCode=1 Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.127895 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.128815 4953 scope.go:117] "RemoveContainer" containerID="7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.152058 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.154672 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.154739 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.154762 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.154791 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.154810 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.173358 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.188350 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.207998 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.230272 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.246190 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259228 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259382 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259405 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259432 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259453 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.259971 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.278769 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.295434 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.312511 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.330046 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.341469 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.359983 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.361841 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.361872 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc 
kubenswrapper[4953]: I1011 02:47:03.361890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.361912 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.361929 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.381518 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a
07dabd660b77f896d4792f14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:03Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.463979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.464022 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.464035 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.464056 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.464070 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.569695 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.569775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.569791 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.569817 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.569843 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.672634 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.672684 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.672736 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.672772 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.672785 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.782891 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.782956 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.782973 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.782997 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.783015 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.794627 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.794702 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:03 crc kubenswrapper[4953]: E1011 02:47:03.794839 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:03 crc kubenswrapper[4953]: E1011 02:47:03.795024 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.885976 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.886025 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.886036 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.886060 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.886072 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.989582 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.989649 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.989659 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.989677 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:03 crc kubenswrapper[4953]: I1011 02:47:03.989688 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:03Z","lastTransitionTime":"2025-10-11T02:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.094598 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.094674 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.094687 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.094710 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.094726 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.135675 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/0.log" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.140414 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.140640 4953 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.166788 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea
6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.190211 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.198209 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.198295 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.198316 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.198344 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.198400 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.215192 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.231311 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.254322 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.272333 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.301726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.301797 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.301815 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.301847 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.301873 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.305959 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.326742 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.340557 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.354731 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.371226 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.391226 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.404446 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.404495 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.404509 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.404531 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.404549 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.410325 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.424726 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:04Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.508202 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.508307 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.508336 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.508371 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.508394 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.612119 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.612194 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.612217 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.612245 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.612264 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.716411 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.716485 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.716507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.716537 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.716558 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.794885 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:04 crc kubenswrapper[4953]: E1011 02:47:04.795121 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.820660 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.820735 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.820754 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.820783 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.820803 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.924645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.924722 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.924742 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.924772 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:04 crc kubenswrapper[4953]: I1011 02:47:04.924795 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:04Z","lastTransitionTime":"2025-10-11T02:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.030720 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.030785 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.030804 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.030835 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.030857 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.135036 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.135133 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.135158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.135195 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.135221 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.147398 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/1.log" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.148769 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/0.log" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.153875 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97" exitCode=1 Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.153948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.154044 4953 scope.go:117] "RemoveContainer" containerID="7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.155393 4953 scope.go:117] "RemoveContainer" containerID="51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97" Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.155907 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.181976 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.202474 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.220305 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.239123 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.239177 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.239197 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.239224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.239243 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.240410 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.266788 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.273684 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9"] Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.274505 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.279788 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.284794 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.303891 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.325341 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc47827
4c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.342812 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.342859 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.342871 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.342890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.342905 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.349074 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.374474 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.385866 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.386019 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2b5m\" (UniqueName: \"kubernetes.io/projected/1356fafe-a703-47a2-8d51-f34303e06ee6-kube-api-access-d2b5m\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.386071 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.386112 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:47:21.386069252 +0000 UTC m=+52.319156946 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.386185 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1356fafe-a703-47a2-8d51-f34303e06ee6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.386283 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.397768 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.420890 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.436532 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.446031 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.446097 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.446119 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.446151 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.446175 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.464033 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766dd
c3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487532 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1356fafe-a703-47a2-8d51-f34303e06ee6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487590 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487643 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487672 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487698 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487721 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2b5m\" (UniqueName: \"kubernetes.io/projected/1356fafe-a703-47a2-8d51-f34303e06ee6-kube-api-access-d2b5m\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487745 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.487773 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.487922 4953 configmap.go:193] Couldn't get configMap 
openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488086 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:21.488046085 +0000 UTC m=+52.421133759 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488134 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488182 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488195 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488211 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488268 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488287 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:21.48826167 +0000 UTC m=+52.421349304 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488299 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488215 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488388 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:21.488358272 +0000 UTC m=+52.421446126 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.488471 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:21.488440074 +0000 UTC m=+52.421527878 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.488853 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.489580 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1356fafe-a703-47a2-8d51-f34303e06ee6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.497040 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670
ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.500267 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1356fafe-a703-47a2-8d51-f34303e06ee6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.520566 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2b5m\" (UniqueName: \"kubernetes.io/projected/1356fafe-a703-47a2-8d51-f34303e06ee6-kube-api-access-d2b5m\") pod \"ovnkube-control-plane-749d76644c-f7nz9\" (UID: \"1356fafe-a703-47a2-8d51-f34303e06ee6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.524166 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.545753 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.549682 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.549725 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.549740 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.549761 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.549777 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.568844 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.589487 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.606104 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.616146 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: W1011 02:47:05.628411 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1356fafe_a703_47a2_8d51_f34303e06ee6.slice/crio-8540abf095e6f7932c8f335788ed81a6fd76af8a7f493a093b42b5b1af64baa7 WatchSource:0}: Error finding container 8540abf095e6f7932c8f335788ed81a6fd76af8a7f493a093b42b5b1af64baa7: Status 
404 returned error can't find the container with id 8540abf095e6f7932c8f335788ed81a6fd76af8a7f493a093b42b5b1af64baa7 Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.645812 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.656716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.656770 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.656785 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.656807 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.656832 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.663768 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.688191 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.713681 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.733253 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.760070 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.763971 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.764017 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc 
kubenswrapper[4953]: I1011 02:47:05.764030 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.764051 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.764066 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.792846 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670
ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.794822 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.794950 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.795416 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:05 crc kubenswrapper[4953]: E1011 02:47:05.795651 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.809684 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.827488 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.840811 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:05Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.867188 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.867287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.867346 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.867416 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.867487 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.970857 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.971123 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.971276 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.971404 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:05 crc kubenswrapper[4953]: I1011 02:47:05.971528 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:05Z","lastTransitionTime":"2025-10-11T02:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.075868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.075949 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.075972 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.076004 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.076033 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.162514 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/1.log" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.172039 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" event={"ID":"1356fafe-a703-47a2-8d51-f34303e06ee6","Type":"ContainerStarted","Data":"8540abf095e6f7932c8f335788ed81a6fd76af8a7f493a093b42b5b1af64baa7"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.179785 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.179846 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.179865 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.179890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.179910 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.252147 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.252347 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.252380 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.252472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.252509 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.280632 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.287818 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.287889 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.287907 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.287937 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.287957 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.310457 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.315491 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.315559 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.315588 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.315677 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.315704 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.338361 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.343853 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.343958 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.343985 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.344020 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.344043 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.367217 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.373116 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.373197 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.373218 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.373261 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.373302 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.399216 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.400063 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.403174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.403251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.403277 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.403312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.403339 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.451576 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-bp9sq"] Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.452577 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.452760 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.477378 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.498941 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.502071 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: 
\"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.502191 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2btjm\" (UniqueName: \"kubernetes.io/projected/329460ba-d6c9-4774-b8d3-354e4406575c-kube-api-access-2btjm\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.507050 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.507106 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.507123 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.507152 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.507170 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.517508 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.543262 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.578421 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false 
hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.600462 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.603023 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2btjm\" (UniqueName: \"kubernetes.io/projected/329460ba-d6c9-4774-b8d3-354e4406575c-kube-api-access-2btjm\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.603127 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.603324 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.603422 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:07.103398563 +0000 UTC m=+38.036486237 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.611004 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.611061 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.611083 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.611112 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.611135 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.624230 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.640042 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2btjm\" (UniqueName: \"kubernetes.io/projected/329460ba-d6c9-4774-b8d3-354e4406575c-kube-api-access-2btjm\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.646442 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.672964 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.701982 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.715246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.715310 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.715332 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.715364 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.715386 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.732399 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.759759 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.778738 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.794454 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:06 crc kubenswrapper[4953]: E1011 02:47:06.794641 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.802276 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.818645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.818692 4953 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.818709 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.818730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.818746 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.822408 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.840162 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:06Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.922063 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.922125 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.922143 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.922174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:06 crc kubenswrapper[4953]: I1011 02:47:06.922197 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:06Z","lastTransitionTime":"2025-10-11T02:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.026248 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.026310 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.026324 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.026348 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.026362 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.109062 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:07 crc kubenswrapper[4953]: E1011 02:47:07.109392 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:07 crc kubenswrapper[4953]: E1011 02:47:07.109549 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:08.109517198 +0000 UTC m=+39.042604872 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.130115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.130191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.130216 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.130252 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.130272 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.179544 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" event={"ID":"1356fafe-a703-47a2-8d51-f34303e06ee6","Type":"ContainerStarted","Data":"c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.179687 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" event={"ID":"1356fafe-a703-47a2-8d51-f34303e06ee6","Type":"ContainerStarted","Data":"37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.205734 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.232292 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.234493 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.234640 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.234664 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.234691 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.234711 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.268699 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.288403 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.316850 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.338922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.339003 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc 
kubenswrapper[4953]: I1011 02:47:07.339031 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.339064 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.339090 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.352320 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670
ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.374979 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.396259 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.420167 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.440881 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.443121 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.443180 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.443199 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.443223 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.443244 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.466673 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.488019 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.508033 4953 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.535065 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.546353 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.546424 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.546442 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.546472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.546491 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.561769 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.587066 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:07Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.650764 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.650823 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.650837 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.650858 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.650871 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.754648 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.754705 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.754717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.754736 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.754753 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.795701 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:07 crc kubenswrapper[4953]: E1011 02:47:07.795974 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.796728 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:07 crc kubenswrapper[4953]: E1011 02:47:07.796975 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.797072 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:07 crc kubenswrapper[4953]: E1011 02:47:07.797170 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.858188 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.858267 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.858286 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.858312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.858333 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.962418 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.962645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.962676 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.962707 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:07 crc kubenswrapper[4953]: I1011 02:47:07.962728 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:07Z","lastTransitionTime":"2025-10-11T02:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.066211 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.066273 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.066290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.066319 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.066337 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.123670 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:08 crc kubenswrapper[4953]: E1011 02:47:08.123930 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:08 crc kubenswrapper[4953]: E1011 02:47:08.124077 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:10.124047392 +0000 UTC m=+41.057135066 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.170018 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.170085 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.170108 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.170139 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.170161 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.274205 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.274267 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.274287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.274313 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.274329 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.378391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.378465 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.378489 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.378524 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.378547 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.482046 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.482140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.482164 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.482198 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.482220 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.584824 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.584859 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.584868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.584883 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.584895 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.688492 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.688562 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.688578 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.688640 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.688662 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794129 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794148 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794181 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794223 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.794697 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:08 crc kubenswrapper[4953]: E1011 02:47:08.794967 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.897751 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.897850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.897868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.897895 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:08 crc kubenswrapper[4953]: I1011 02:47:08.897915 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:08Z","lastTransitionTime":"2025-10-11T02:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.001388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.001452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.001471 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.001499 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.001522 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.104369 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.104458 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.104480 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.104505 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.104524 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.209667 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.209730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.209749 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.209775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.209799 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.313675 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.313746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.313764 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.313793 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.313816 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.417005 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.417056 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.417065 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.417087 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.417102 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.520643 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.520708 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.520729 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.520761 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.520784 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.624994 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.625092 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.625111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.625149 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.625171 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.729236 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.729313 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.729331 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.729360 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.729381 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.794720 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.794911 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:09 crc kubenswrapper[4953]: E1011 02:47:09.795002 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.795105 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:09 crc kubenswrapper[4953]: E1011 02:47:09.795275 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:09 crc kubenswrapper[4953]: E1011 02:47:09.795524 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.823317 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"container
ID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 
02:47:09.838389 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.838460 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.838481 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.838510 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.838531 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.860268 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670
ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7cf33b9ecd46463759c04e3bc3c7270086d3563a07dabd660b77f896d4792f14\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"message\\\":\\\"andler.go:208] Removed *v1.Pod event handler 3\\\\nI1011 02:47:02.806326 6241 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.806594 6241 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1011 02:47:02.806932 6241 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807024 6241 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807058 6241 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807091 6241 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1011 02:47:02.807199 6241 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1011 02:47:02.807868 6241 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: 
Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.881406 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.908472 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.926470 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.941452 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.941916 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.941993 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.942016 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.942432 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.942690 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:09Z","lastTransitionTime":"2025-10-11T02:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.959750 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.976738 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:09 crc kubenswrapper[4953]: I1011 02:47:09.995243 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:09Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.022891 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.046452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.046889 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.047124 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.047288 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.047419 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:10Z","lastTransitionTime":"2025-10-11T02:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.046900 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.067563 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.084068 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.104233 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.125890 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.144685 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:10Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.148459 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:10 crc kubenswrapper[4953]: E1011 02:47:10.148708 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:10 crc kubenswrapper[4953]: E1011 02:47:10.148858 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:14.148803265 +0000 UTC m=+45.081890949 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.152775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.152841 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.152860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.152887 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.152905 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:10Z","lastTransitionTime":"2025-10-11T02:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.257816 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.257886 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.257904 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.257930 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.257949 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:10Z","lastTransitionTime":"2025-10-11T02:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:10 crc kubenswrapper[4953]: I1011 02:47:10.795174 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:10 crc kubenswrapper[4953]: E1011 02:47:10.795361 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node status cycle repeated; occurrences at 02:47:10.880 through 02:47:11.716 omitted]
Oct 11 02:47:11 crc kubenswrapper[4953]: I1011 02:47:11.794721 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:11 crc kubenswrapper[4953]: I1011 02:47:11.794805 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:11 crc kubenswrapper[4953]: E1011 02:47:11.794917 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:11 crc kubenswrapper[4953]: I1011 02:47:11.794963 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:11 crc kubenswrapper[4953]: E1011 02:47:11.795125 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:11 crc kubenswrapper[4953]: E1011 02:47:11.795283 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node status cycle repeated; occurrences at 02:47:11.818 through 02:47:12.754 omitted]
Oct 11 02:47:12 crc kubenswrapper[4953]: I1011 02:47:12.794518 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:12 crc kubenswrapper[4953]: E1011 02:47:12.794791 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node status cycle repeated; occurrences at 02:47:12.860 through 02:47:13.719 omitted]
Oct 11 02:47:13 crc kubenswrapper[4953]: I1011 02:47:13.794678 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:13 crc kubenswrapper[4953]: I1011 02:47:13.794733 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:13 crc kubenswrapper[4953]: I1011 02:47:13.794710 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:13 crc kubenswrapper[4953]: E1011 02:47:13.794943 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:13 crc kubenswrapper[4953]: E1011 02:47:13.795204 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:13 crc kubenswrapper[4953]: E1011 02:47:13.795402 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node status cycle repeated; occurrences at 02:47:13.823 through 02:47:14.133 omitted]
Oct 11 02:47:14 crc kubenswrapper[4953]: I1011 02:47:14.200253 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:14 crc kubenswrapper[4953]: E1011 02:47:14.200466 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 11 02:47:14 crc kubenswrapper[4953]: E1011 02:47:14.200559 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:22.200535037 +0000 UTC m=+53.133622701 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered
[node status cycle repeated; occurrences at 02:47:14.237 through 02:47:14.758 omitted]
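The two metrics-certs mount failures show the per-volume retry delay doubling: durationBeforeRetry is 4s at m=+45 and 8s at m=+53. The kubelet's nested pending operations apply an exponential backoff of this shape. A simplified sketch; the initial delay and cap here are assumptions chosen to illustrate the doubling, not values taken from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	// durationBeforeRetry doubles after each consecutive failure, up to a cap.
	delay := 500 * time.Millisecond // assumed starting point
	const maxDelay = 2 * time.Minute // assumed cap
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d failed; no retries permitted for %s\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}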
Oct 11 02:47:14 crc kubenswrapper[4953]: I1011 02:47:14.795356 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:14 crc kubenswrapper[4953]: E1011 02:47:14.795578 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node status cycle repeated; occurrences at 02:47:14.861 through 02:47:15.484 omitted]
Has your network provider started?"} Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.587827 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.587898 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.587922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.587950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.588004 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:15Z","lastTransitionTime":"2025-10-11T02:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.691388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.691492 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.691516 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.691547 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.691565 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:15Z","lastTransitionTime":"2025-10-11T02:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.794392 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.794392 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.794561 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:15 crc kubenswrapper[4953]: E1011 02:47:15.794595 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:15 crc kubenswrapper[4953]: E1011 02:47:15.794854 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:15 crc kubenswrapper[4953]: E1011 02:47:15.795021 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.801172 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.801247 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.801275 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.801328 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.801355 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:15Z","lastTransitionTime":"2025-10-11T02:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.904252 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.904312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.904328 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.904351 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:15 crc kubenswrapper[4953]: I1011 02:47:15.904366 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:15Z","lastTransitionTime":"2025-10-11T02:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.007814 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.007887 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.007908 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.007934 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.007952 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.111322 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.111408 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.111437 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.111473 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.111498 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.215565 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.215699 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.215724 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.215758 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.215779 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.320410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.320474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.320493 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.320515 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.320530 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.424029 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.424100 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.424114 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.424143 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.424160 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.528391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.528478 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.528501 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.528535 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.528560 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.584267 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.584333 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.584345 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.584369 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.584382 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.603682 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:16Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.609045 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.609090 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.609102 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.609174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.609192 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.625086 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:16Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.630068 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.630112 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.630121 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.630141 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.630153 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.643319 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:16Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.649688 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.649744 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.649761 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.649787 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.649806 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.667572 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:16Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.674717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.674797 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.674822 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.674856 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.674881 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.696627 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:16Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.696809 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.699198 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.699268 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.699287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.699315 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.699335 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.794838 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:16 crc kubenswrapper[4953]: E1011 02:47:16.795041 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.802934 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.802993 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.803014 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.803043 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.803061 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.906046 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.906117 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.906142 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.906176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:16 crc kubenswrapper[4953]: I1011 02:47:16.906200 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:16Z","lastTransitionTime":"2025-10-11T02:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.010249 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.010306 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.010322 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.010346 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.010364 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.043943 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.045054 4953 scope.go:117] "RemoveContainer" containerID="51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.070151 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.093819 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.113481 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.113546 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.113560 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.113577 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.113588 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.117514 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.138746 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.155564 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.174573 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.193486 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.211452 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.218513 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.218568 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.218581 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.218633 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.218650 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.228114 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.245440 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.263899 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.286246 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.317323 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.321287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.321343 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.321361 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.321385 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.321401 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.340701 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.362683 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.377125 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.424170 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.424419 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.424433 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.424457 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.424472 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.528516 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.528595 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.528680 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.528716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.528737 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.582066 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.595867 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.597998 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.620488 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.631412 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.631472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.631485 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.631510 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.631524 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.643469 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.656957 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.672311 4953 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d21
7f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.690763 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.705528 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.724223 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.734242 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.734306 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.734330 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.734359 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.734378 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.748139 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.776127 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.792964 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.795244 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.795303 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:17 crc kubenswrapper[4953]: E1011 02:47:17.795404 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:17 crc kubenswrapper[4953]: E1011 02:47:17.795468 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.795573 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:17 crc kubenswrapper[4953]: E1011 02:47:17.795675 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.815704 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dc
b5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.836696 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.836738 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.836749 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.836772 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.836782 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.837926 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.854011 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.869242 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.882308 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:17Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.940085 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.940128 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.940141 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.940163 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:17 crc kubenswrapper[4953]: I1011 02:47:17.940177 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:17Z","lastTransitionTime":"2025-10-11T02:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.043391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.043467 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.043482 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.043505 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.043523 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.146963 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.147018 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.147031 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.147072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.147086 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.231741 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/2.log" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.232196 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/1.log" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.234660 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187" exitCode=1 Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.235485 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.235547 4953 scope.go:117] "RemoveContainer" containerID="51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.236102 4953 scope.go:117] "RemoveContainer" containerID="b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187" Oct 11 02:47:18 crc kubenswrapper[4953]: E1011 02:47:18.236236 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.251129 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.251185 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.251203 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.251247 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.251268 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.258788 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.281801 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.297310 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.316590 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.333856 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.351728 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.355223 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.355369 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.355401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.355860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.355907 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.379776 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766dd
c3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.411764 4953 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.436204 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.458850 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.460019 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.460072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.460091 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.460126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 
02:47:18.460146 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.490869 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.515661 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.539569 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.559102 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.563901 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.563963 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.563987 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.564014 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.564033 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.584409 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.603982 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.621113 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:18Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.667040 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.667381 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.667518 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.667634 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.667740 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.774656 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.774728 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.774746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.774776 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.774798 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.794702 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:18 crc kubenswrapper[4953]: E1011 02:47:18.794905 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.878779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.878833 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.878854 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.878882 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.878902 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.983039 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.983097 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.983108 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.983126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:18 crc kubenswrapper[4953]: I1011 02:47:18.983138 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:18Z","lastTransitionTime":"2025-10-11T02:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.087550 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.087866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.087895 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.087933 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.087964 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.191153 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.191216 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.191230 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.191252 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.191265 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.243303 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/2.log" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.295394 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.295480 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.295499 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.295530 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.295554 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.401007 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.401116 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.401146 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.401191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.401220 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.509066 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.509120 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.509135 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.509156 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.509170 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.611942 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.611988 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.612000 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.612021 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.612033 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.716468 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.716509 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.716519 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.716540 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.716553 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.795035 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.795066 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.795172 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:19 crc kubenswrapper[4953]: E1011 02:47:19.795238 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:19 crc kubenswrapper[4953]: E1011 02:47:19.795378 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:19 crc kubenswrapper[4953]: E1011 02:47:19.795575 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.816642 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.818884 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.818949 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.818966 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.818996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.819015 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.836022 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.858113 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.874901 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTi
me\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.900406 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e
6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.922513 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.922653 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.922717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.922779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.922851 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:19Z","lastTransitionTime":"2025-10-11T02:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.926342 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.944379 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.961745 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:19 crc kubenswrapper[4953]: I1011 02:47:19.980794 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:19Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.004578 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.025131 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.026467 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.026530 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.026553 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.026586 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.026645 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.046522 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.066189 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.089357 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.115507 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.129863 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.129939 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.129959 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.129990 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.130012 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.146673 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766dd
c3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.181287 4953 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51689b2d8ec2eb3e3a3d1aeae2ad565bee2b6670ac19199a61e837f46301ca97\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:04Z\\\",\\\"message\\\":\\\"Map:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:04.170413 6414 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-apiserver/check-endpoints]} name:Service_openshift-apiserver/check-endpoints_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.139:17698:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8efa4d1a-72f5-4dfa-9bc2-9d93ef11ecf2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1011 02:47:04.174240 6414 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:20Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.232970 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.233015 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.233028 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.233047 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.233060 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.336526 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.336622 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.336642 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.336671 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.336692 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.439067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.439137 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.439155 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.439183 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.439201 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.541903 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.541961 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.541981 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.542004 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.542021 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.644907 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.644966 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.644984 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.645007 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.645022 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.748565 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.748682 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.748710 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.748745 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.748773 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.795019 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:20 crc kubenswrapper[4953]: E1011 02:47:20.795266 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.852423 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.852479 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.852498 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.852525 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.852539 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.956489 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.956639 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.956652 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.956677 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:20 crc kubenswrapper[4953]: I1011 02:47:20.956690 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:20Z","lastTransitionTime":"2025-10-11T02:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.060293 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.060363 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.060376 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.060407 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.060424 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.164551 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.164659 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.164678 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.164710 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.164731 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.267856 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.267945 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.267965 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.267994 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.268013 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.371007 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.371072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.371094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.371135 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.371159 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.404658 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.404936 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:47:53.404889713 +0000 UTC m=+84.337977417 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.474474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.474539 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.474560 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.474585 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.474636 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.507214 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.507342 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.507423 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.507791 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.507881 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.507951 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.507982 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.508110 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:53.508074463 +0000 UTC m=+84.441162147 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.508521 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.508728 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:53.508683268 +0000 UTC m=+84.441770942 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.513220 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.513288 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.513328 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.513710 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:53.513665235 +0000 UTC m=+84.446752919 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.513989 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.514317 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:47:53.514282551 +0000 UTC m=+84.447370235 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.580360 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.580434 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.580452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.580505 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.580524 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.683472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.683532 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.683541 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.683557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.683585 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.787211 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.787280 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.787290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.787311 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.787322 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.794745 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.794833 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.794893 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.794836 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.794973 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:21 crc kubenswrapper[4953]: E1011 02:47:21.795139 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.890023 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.890094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.890111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.890132 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.890146 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.993697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.993782 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.993805 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.993840 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:21 crc kubenswrapper[4953]: I1011 02:47:21.993930 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:21Z","lastTransitionTime":"2025-10-11T02:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.097434 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.097507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.097534 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.097571 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.097597 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.200882 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.200958 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.200978 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.201008 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.201028 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.217448 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:22 crc kubenswrapper[4953]: E1011 02:47:22.217719 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:22 crc kubenswrapper[4953]: E1011 02:47:22.217861 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:47:38.217822915 +0000 UTC m=+69.150910599 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.305233 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.305321 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.305344 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.305380 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.305403 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.409801 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.409867 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.409889 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.409917 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.409958 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.514771 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.514845 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.514868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.514901 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.514921 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.619173 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.619239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.619258 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.619290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.619309 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.722480 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.722538 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.722562 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.722593 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.722662 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.795432 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:22 crc kubenswrapper[4953]: E1011 02:47:22.795688 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.826806 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.826880 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.826898 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.826931 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.826951 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.930410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.930484 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.930507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.930908 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:22 crc kubenswrapper[4953]: I1011 02:47:22.931217 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:22Z","lastTransitionTime":"2025-10-11T02:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.034711 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.034779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.034797 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.035204 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.035256 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.138950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.139032 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.139051 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.139122 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.139144 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.243374 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.243452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.243473 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.243504 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.243525 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.347343 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.347407 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.347424 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.347450 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.347469 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.451002 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.451073 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.451092 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.451122 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.451164 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.554684 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.554781 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.554805 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.554840 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.554865 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.664143 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.664216 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.664251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.664328 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.664352 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.768745 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.768847 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.768950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.769406 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.769647 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.795049 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.795036 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:23 crc kubenswrapper[4953]: E1011 02:47:23.795347 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:23 crc kubenswrapper[4953]: E1011 02:47:23.795535 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.794912 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:23 crc kubenswrapper[4953]: E1011 02:47:23.796063 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.873049 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.873134 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.873160 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.873194 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.873218 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.977532 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.977631 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.977654 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.977684 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:23 crc kubenswrapper[4953]: I1011 02:47:23.977705 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:23Z","lastTransitionTime":"2025-10-11T02:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.080960 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.081019 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.081035 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.081062 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.081081 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.184774 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.185141 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.185154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.185176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.185192 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.288407 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.288478 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.288501 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.288530 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.288550 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.392667 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.392746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.392769 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.392798 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.392817 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.496501 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.496668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.496692 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.496719 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.496736 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.600720 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.600772 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.600792 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.600819 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.600839 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.704810 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.704888 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.704909 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.704941 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.704959 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.795165 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:24 crc kubenswrapper[4953]: E1011 02:47:24.795437 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.814527 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.814645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.814672 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.814704 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.814730 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.918374 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.918443 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.918462 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.918491 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:24 crc kubenswrapper[4953]: I1011 02:47:24.918510 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:24Z","lastTransitionTime":"2025-10-11T02:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.022470 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.022552 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.022573 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.022643 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.022665 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.126002 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.126064 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.126084 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.126113 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.126131 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.230866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.230955 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.230981 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.231014 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.231037 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.334807 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.334892 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.334916 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.334950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.334975 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.438866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.438967 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.438994 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.439035 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.439061 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.544089 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.544191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.544244 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.544282 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.544338 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.648431 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.648517 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.648539 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.648674 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.648704 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.754904 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.754966 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.754983 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.755011 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.755033 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.795155 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.795230 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.795220 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:25 crc kubenswrapper[4953]: E1011 02:47:25.795362 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:25 crc kubenswrapper[4953]: E1011 02:47:25.795501 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:25 crc kubenswrapper[4953]: E1011 02:47:25.795706 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.858719 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.858835 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.858848 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.858872 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.858886 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.962892 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.962962 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.962974 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.962999 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:25 crc kubenswrapper[4953]: I1011 02:47:25.963012 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:25Z","lastTransitionTime":"2025-10-11T02:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.066189 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.066273 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.066292 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.066324 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.066346 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.169357 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.169429 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.169448 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.169477 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.169496 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.272708 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.272777 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.272795 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.272824 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.272842 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.376273 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.376353 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.376372 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.376405 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.376425 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.479577 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.479708 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.479734 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.479810 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.479838 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.583497 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.583676 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.583697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.583726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.583744 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.686853 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.686921 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.686943 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.686970 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.686990 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.790355 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.790436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.790458 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.790493 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.790512 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.794519 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:26 crc kubenswrapper[4953]: E1011 02:47:26.794685 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.844976 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.845041 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.845054 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.845079 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.845093 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: E1011 02:47:26.868064 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:26Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.875974 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.876120 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.876140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.876174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.876233 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: E1011 02:47:26.896132 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:26Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.902104 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.902253 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.902339 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.902375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.902399 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: E1011 02:47:26.924833 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:26Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.929687 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.929742 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.929752 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.929770 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.929783 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:26 crc kubenswrapper[4953]: E1011 02:47:26.946233 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:26Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.994348 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.994451 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.994471 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.994535 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:26 crc kubenswrapper[4953]: I1011 02:47:26.994556 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:26Z","lastTransitionTime":"2025-10-11T02:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: E1011 02:47:27.017636 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:27Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:27 crc kubenswrapper[4953]: E1011 02:47:27.017969 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.020251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.020332 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.020357 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.020388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.020416 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.123133 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.123231 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.123251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.123282 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.123301 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.226652 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.226715 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.226732 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.226942 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.226957 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.330290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.330352 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.330370 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.330399 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.330418 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.433886 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.433935 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.433945 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.433965 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.433977 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.538284 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.538397 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.538425 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.538487 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.538514 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.642663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.642776 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.642796 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.642874 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.642948 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.747004 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.747124 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.747183 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.747213 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.747235 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.795191 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.795230 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.795232 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:27 crc kubenswrapper[4953]: E1011 02:47:27.795377 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:27 crc kubenswrapper[4953]: E1011 02:47:27.795539 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:27 crc kubenswrapper[4953]: E1011 02:47:27.795783 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.851233 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.851350 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.851376 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.851406 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.851427 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.955765 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.955847 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.955866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.955903 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:27 crc kubenswrapper[4953]: I1011 02:47:27.955924 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:27Z","lastTransitionTime":"2025-10-11T02:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.059436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.059566 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.059587 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.059671 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.059699 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.163189 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.163272 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.163293 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.163326 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.163346 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.265936 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.265980 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.265989 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.266006 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.266021 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.369701 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.369769 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.369794 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.369824 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.369844 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.473094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.473156 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.473174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.473200 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.473220 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.579277 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.579350 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.579374 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.579403 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.579426 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.682825 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.682899 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.682922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.682952 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.682976 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.787052 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.787120 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.787140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.787168 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.787188 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.795295 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:28 crc kubenswrapper[4953]: E1011 02:47:28.795463 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.796943 4953 scope.go:117] "RemoveContainer" containerID="b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187" Oct 11 02:47:28 crc kubenswrapper[4953]: E1011 02:47:28.797284 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.832325 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea76213
7099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.857715 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.877776 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.890385 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.890575 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.890700 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.890849 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.890877 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.894431 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc 
kubenswrapper[4953]: I1011 02:47:28.918369 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.938743 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.959435 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.981172 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:28Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.995532 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.995648 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.995674 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.995709 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:28 crc kubenswrapper[4953]: I1011 02:47:28.995743 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:28Z","lastTransitionTime":"2025-10-11T02:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.003862 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.024714 4953 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b46
2f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.041878 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.058018 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.072644 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.085851 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.098953 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.099008 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.099025 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.099052 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.099072 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.106802 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.126373 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.144873 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.202540 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.202633 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.202656 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.202685 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.202705 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.305728 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.305799 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.305818 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.305845 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.305868 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.409746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.409857 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.409876 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.409906 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.409926 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.513653 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.513707 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.513729 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.513762 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.513783 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.617632 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.617730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.617749 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.617777 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.617792 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.721411 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.721507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.721528 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.721557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.721576 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.795048 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.795294 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:29 crc kubenswrapper[4953]: E1011 02:47:29.795348 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.795063 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:29 crc kubenswrapper[4953]: E1011 02:47:29.795643 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:29 crc kubenswrapper[4953]: E1011 02:47:29.795751 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.820420 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.825526 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.825638 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.825668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.825710 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.825736 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.843275 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.867640 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.888071 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.910266 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.928911 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.928983 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.929000 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.929027 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.929046 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:29Z","lastTransitionTime":"2025-10-11T02:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.929521 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.954101 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.975802 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:29 crc kubenswrapper[4953]: I1011 02:47:29.994870 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:29Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.019425 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.032794 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.032860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:30 crc 
kubenswrapper[4953]: I1011 02:47:30.032879 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.032912 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.032933 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.054183 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea76213
7099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.073706 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.107643 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.128973 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.139211 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.139420 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.139563 4953 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.139793 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.139939 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.144818 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.162982 4953 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4
fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.180022 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:30Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.242417 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.242474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.242490 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.242515 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.242531 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.346853 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.347018 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.347042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.347070 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.347093 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.450182 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.450242 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.450260 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.450290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.450313 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.554096 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.554172 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.554193 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.554219 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.554237 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.657056 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.657122 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.657137 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.657156 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.657170 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.761235 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.761307 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.761331 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.761363 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.761382 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.795511 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:30 crc kubenswrapper[4953]: E1011 02:47:30.795783 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.865037 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.865094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.865111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.865137 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.865155 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.969465 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.969541 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.969564 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.969645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:30 crc kubenswrapper[4953]: I1011 02:47:30.969674 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:30Z","lastTransitionTime":"2025-10-11T02:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.073742 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.073817 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.073836 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.073867 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.073894 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.177030 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.177077 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.177088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.177111 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.177123 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.280686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.280750 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.280767 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.280792 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.280813 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.384508 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.384564 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.384583 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.384637 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.384655 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.488463 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.488573 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.488595 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.488661 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.488684 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.592390 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.592456 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.592474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.592502 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.592520 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.696272 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.696392 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.696415 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.696447 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.696467 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.794598 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.794678 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.794678 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:31 crc kubenswrapper[4953]: E1011 02:47:31.795112 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:31 crc kubenswrapper[4953]: E1011 02:47:31.795278 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:47:31 crc kubenswrapper[4953]: E1011 02:47:31.795438 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.800351 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.800429 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.800464 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.800557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.800652 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.905042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.905150 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.905185 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.905221 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:31 crc kubenswrapper[4953]: I1011 02:47:31.905245 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:31Z","lastTransitionTime":"2025-10-11T02:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.008936 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.009068 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.009103 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.009139 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.009163 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.112813 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.112880 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.112901 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.112934 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.112962 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.215491 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.215524 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.215535 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.215553 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.215566 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.318042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.318142 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.318161 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.318188 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.318207 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.421574 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.421670 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.421683 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.421709 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.421723 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.525803 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.525871 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.525885 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.525913 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.525928 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.630972 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.631056 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.631081 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.631113 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.631136 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.735215 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.735676 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.735694 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.735721 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.735739 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.794800 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:32 crc kubenswrapper[4953]: E1011 02:47:32.794991 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.839433 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.839472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.839483 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.839506 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.839520 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.942565 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.942657 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.942675 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.942705 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:32 crc kubenswrapper[4953]: I1011 02:47:32.942753 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:32Z","lastTransitionTime":"2025-10-11T02:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.045028 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.045058 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.045067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.045082 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.045092 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.148012 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.148069 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.148088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.148115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.148136 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.251290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.251369 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.251387 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.251417 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.251438 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.355787 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.355869 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.355896 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.355935 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.355969 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.459806 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.459866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.459882 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.459910 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.459928 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.564047 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.564099 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.564118 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.564148 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.564168 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.667167 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.667681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.667869 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.668006 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.668148 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.771561 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.771667 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.771686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.771716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.771735 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.795921 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.796109 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.795930 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:33 crc kubenswrapper[4953]: E1011 02:47:33.796315 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:33 crc kubenswrapper[4953]: E1011 02:47:33.796436 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:33 crc kubenswrapper[4953]: E1011 02:47:33.796531 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.875245 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.875319 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.875341 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.875377 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.875404 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.979857 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.980315 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.980479 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.980663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:33 crc kubenswrapper[4953]: I1011 02:47:33.980907 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:33Z","lastTransitionTime":"2025-10-11T02:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.084895 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.084972 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.084989 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.085024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.085045 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.187976 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.188439 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.188594 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.188793 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.188948 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.292013 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.292117 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.292140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.292172 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.292191 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.394974 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.395013 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.395024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.395043 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.395054 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.498463 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.498515 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.498529 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.498554 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.498572 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.601844 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.602003 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.602027 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.602086 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.602324 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.706217 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.706288 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.706307 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.706346 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.706366 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.795036 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:34 crc kubenswrapper[4953]: E1011 02:47:34.795212 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.815126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.815202 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.815220 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.815250 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.815269 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.918312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.918391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.918413 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.918443 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:34 crc kubenswrapper[4953]: I1011 02:47:34.918466 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:34Z","lastTransitionTime":"2025-10-11T02:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.022295 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.022376 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.022396 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.022481 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.022504 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.126394 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.126468 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.126489 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.126519 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.126540 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.229162 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.229212 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.229224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.229246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.229259 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.332330 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.332392 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.332408 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.332436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.332452 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.436064 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.436228 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.436248 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.436277 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.436293 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.540716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.540779 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.540790 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.540812 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.540825 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.643734 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.643810 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.643875 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.643907 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.643928 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.747287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.747353 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.747367 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.747389 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.747403 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.795299 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.795349 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.795352 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:35 crc kubenswrapper[4953]: E1011 02:47:35.795566 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:35 crc kubenswrapper[4953]: E1011 02:47:35.795825 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:35 crc kubenswrapper[4953]: E1011 02:47:35.795927 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.850125 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.850231 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.850258 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.850297 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.850326 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.953641 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.953694 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.953707 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.953726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:35 crc kubenswrapper[4953]: I1011 02:47:35.953736 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:35Z","lastTransitionTime":"2025-10-11T02:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.057044 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.057130 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.057147 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.057176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.057200 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.160834 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.160932 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.160956 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.160998 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.161025 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.264262 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.264323 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.264332 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.264356 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.264365 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.367199 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.367281 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.367303 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.367333 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.367353 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.470962 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.471090 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.471110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.471141 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.471164 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.573883 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.573957 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.573976 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.574003 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.574022 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.677245 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.677332 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.677356 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.677392 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.677416 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.781050 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.781097 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.781109 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.781128 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.781139 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.794707 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:36 crc kubenswrapper[4953]: E1011 02:47:36.794896 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.885048 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.885123 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.885142 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.885171 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.885191 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.988545 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.988676 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.988697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.988728 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:36 crc kubenswrapper[4953]: I1011 02:47:36.988753 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:36Z","lastTransitionTime":"2025-10-11T02:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.092663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.092744 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.092771 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.092806 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.092828 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.196672 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.196727 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.196738 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.196814 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.196828 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.299800 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.299840 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.299851 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.299866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.299876 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.314942 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.315028 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.315048 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.315577 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.315666 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.336432 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:37Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.342487 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.342564 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.342590 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.342660 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.342717 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:37Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.369270 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.369335 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.369353 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.369380 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.369406 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.388819 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:37Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.395681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.395741 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.395760 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.395790 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.395810 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.416286 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:37Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.422925 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.423015 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.423042 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.423153 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.423220 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.446387 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:37Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.446805 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.449450 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.449549 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.449576 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.449644 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.449672 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.552975 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.553039 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.553055 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.553080 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.553095 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.655971 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.656022 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.656030 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.656049 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.656059 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.758347 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.758400 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.758415 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.758436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.758450 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.794721 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.794739 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.794887 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.795099 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.795256 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:37 crc kubenswrapper[4953]: E1011 02:47:37.795391 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.861875 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.861938 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.861948 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.861972 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.861987 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.965062 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.965115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.965125 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.965144 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:37 crc kubenswrapper[4953]: I1011 02:47:37.965156 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:37Z","lastTransitionTime":"2025-10-11T02:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.068363 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.068417 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.068428 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.068447 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.068457 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.171319 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.171375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.171389 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.171412 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.171429 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.242292 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:38 crc kubenswrapper[4953]: E1011 02:47:38.242464 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 11 02:47:38 crc kubenswrapper[4953]: E1011 02:47:38.242540 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:48:10.242521162 +0000 UTC m=+101.175608806 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.273910 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.273957 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.273971 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.273990 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.274001 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.376850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.376928 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.376947 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.376978 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.377005 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.479629 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.479691 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.479703 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.479721 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.479731 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.582699 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.582748 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.582758 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.582775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.582787 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.686151 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.686223 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.686237 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.686259 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.686277 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.790231 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.790298 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.790377 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.790408 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.790429 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.795019 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:38 crc kubenswrapper[4953]: E1011 02:47:38.795258 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.895195 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.895282 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.895309 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.895348 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.895371 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.998999 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.999064 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.999085 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:38 crc kubenswrapper[4953]: I1011 02:47:38.999117 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:38.999141 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:38Z","lastTransitionTime":"2025-10-11T02:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.102189 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.102289 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.102312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.102346 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.102369 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.206199 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.206265 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.206279 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.206302 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.206315 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.309592 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.309645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.309655 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.309673 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.309682 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.412782 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.412847 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.412865 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.412890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.412911 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.516086 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.516150 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.516166 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.516190 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.516207 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.618622 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.618680 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.618689 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.618710 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.618723 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.721899 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.721951 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.721963 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.721979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.721989 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.794931 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.794996 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:39 crc kubenswrapper[4953]: E1011 02:47:39.795147 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:39 crc kubenswrapper[4953]: E1011 02:47:39.795289 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.795495 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:39 crc kubenswrapper[4953]: E1011 02:47:39.795688 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.819202 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.825507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.825589 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.825659 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.825697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.825727 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.840090 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.856069 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.875207 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.888705 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.902501 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.916255 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.929162 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.929235 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.929256 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.929287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.929310 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:39Z","lastTransitionTime":"2025-10-11T02:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.934089 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.953315 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.968418 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:39 crc kubenswrapper[4953]: I1011 02:47:39.985134 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:39Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.009940 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.032351 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.032398 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc 
kubenswrapper[4953]: I1011 02:47:40.032412 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.032430 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.032444 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.042033 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea76213
7099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.063576 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.083893 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.107224 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.120979 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:40Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.135838 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.135877 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.135889 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.135907 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.135919 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.241713 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.241795 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.241821 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.241860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.241897 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.345115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.345162 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.345176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.345196 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.345210 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.448075 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.448173 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.448200 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.448238 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.448268 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.552929 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.553013 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.553034 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.553067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.553091 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.656509 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.656555 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.656565 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.656581 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.656592 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.759993 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.760053 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.760067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.760094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.760111 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.794598 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:40 crc kubenswrapper[4953]: E1011 02:47:40.794815 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.863088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.863144 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.863200 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.863226 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.863239 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.966170 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.966212 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.966223 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.966245 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:40 crc kubenswrapper[4953]: I1011 02:47:40.966259 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:40Z","lastTransitionTime":"2025-10-11T02:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.069145 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.069202 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.069214 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.069248 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.069261 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.171894 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.171930 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.171940 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.171955 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.171965 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.274637 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.274681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.274691 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.274706 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.274716 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.338305 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/0.log" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.338358 4953 generic.go:334] "Generic (PLEG): container finished" podID="5a114089-658e-442c-b755-9ca9b127f368" containerID="ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02" exitCode=1 Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.338419 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerDied","Data":"ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.339145 4953 scope.go:117] "RemoveContainer" containerID="ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.356180 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.371047 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.379545 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.379663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.379692 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.379731 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.379758 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.390547 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.402219 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.420263 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.438170 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.454075 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.469674 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.481974 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.482469 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.482494 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.482504 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.482525 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.482538 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.495182 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.508210 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.522643 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 
02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.536624 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.551273 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.564433 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.583723 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.585557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.585646 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc 
kubenswrapper[4953]: I1011 02:47:41.585666 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.585690 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.585708 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.623550 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea76213
7099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:41Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.689367 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.689427 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.689439 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.689460 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.689473 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.793639 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.793696 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.793705 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.793725 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.793738 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.794474 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.794540 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:41 crc kubenswrapper[4953]: E1011 02:47:41.794633 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.794784 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:41 crc kubenswrapper[4953]: E1011 02:47:41.794873 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:41 crc kubenswrapper[4953]: E1011 02:47:41.795343 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.897239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.897301 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.897315 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.897342 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:41 crc kubenswrapper[4953]: I1011 02:47:41.897359 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:41Z","lastTransitionTime":"2025-10-11T02:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.000117 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.000166 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.000179 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.000202 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.000217 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.102825 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.102902 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.102920 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.102946 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.102967 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.206088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.206163 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.206181 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.206209 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.206228 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.309573 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.309711 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.309739 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.309775 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.309801 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.344195 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/0.log" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.344295 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerStarted","Data":"c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.369476 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.390264 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.412226 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.412300 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.412315 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.412342 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.412356 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.413031 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.431536 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.458552 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.476001 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.495915 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.513375 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.515047 4953 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.515080 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.515092 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.515110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.515121 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.528826 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.551418 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.610983 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.617354 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.617401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.617410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.617429 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.617439 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.626828 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.641476 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.653957 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.664023 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.677833 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.700190 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:42Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.720017 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.720070 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.720082 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.720100 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.720115 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.794840 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:42 crc kubenswrapper[4953]: E1011 02:47:42.795359 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.795635 4953 scope.go:117] "RemoveContainer" containerID="b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.826991 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.827072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.827095 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.827126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.827147 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.934447 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.934511 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.934531 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.934558 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:42 crc kubenswrapper[4953]: I1011 02:47:42.934579 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:42Z","lastTransitionTime":"2025-10-11T02:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.038327 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.038375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.038388 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.038410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.038425 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.141887 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.141940 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.141955 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.141979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.142006 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.244862 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.244901 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.244909 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.244925 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.244936 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.347385 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.347413 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.347421 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.347436 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.347448 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.350274 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/2.log" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.353627 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.354063 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.367851 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.380720 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.397151 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.412841 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.430470 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.446281 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.450839 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.450906 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.450918 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.450943 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.450958 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.461208 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.474231 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.485858 4953 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.501764 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.518648 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.532024 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.548280 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.552933 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.552966 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.552977 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.553000 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.553012 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.560003 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.572342 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.590110 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.614050 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error 
occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\
\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:43Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.656256 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.656346 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.656365 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.656396 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.656414 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.759342 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.759400 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.759410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.759428 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.759440 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.795256 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.795362 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.795362 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:43 crc kubenswrapper[4953]: E1011 02:47:43.795725 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:43 crc kubenswrapper[4953]: E1011 02:47:43.795953 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:43 crc kubenswrapper[4953]: E1011 02:47:43.796025 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.808034 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.862174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.862224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.862233 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.862254 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.862267 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.964964 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.965038 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.965058 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.965089 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:43 crc kubenswrapper[4953]: I1011 02:47:43.965109 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:43Z","lastTransitionTime":"2025-10-11T02:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.068540 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.068677 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.068708 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.068746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.068777 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.172040 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.172115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.172135 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.172163 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.172184 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.275678 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.275787 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.275811 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.275837 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.275851 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.359429 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/3.log" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.361311 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/2.log" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379432 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379509 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379528 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379576 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.379677 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" exitCode=1 Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.380574 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.380683 4953 scope.go:117] "RemoveContainer" containerID="b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.382442 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:47:44 crc kubenswrapper[4953]: E1011 02:47:44.382782 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.404436 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.424838 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.438948 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.451271 4953 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.468495 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.483537 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.483586 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.483618 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.483638 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.483651 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.484657 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.496938 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.512087 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.526318 4953 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.538514 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.548143 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.565617 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.585732 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.585764 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc 
kubenswrapper[4953]: I1011 02:47:44.585777 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.585794 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.585807 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.594750 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a444
3e88d2a9ad7b86b63489e036\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30487ae9b955b4511005449b531dabc7ea762137099932270159e2e3dbc9187\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:18Z\\\",\\\"message\\\":\\\" resource setup for 14 objects: [openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9 openshift-ovn-kubernetes/ovnkube-node-7277g openshift-kube-controller-manager/kube-controller-manager-crc openshift-machine-config-operator/machine-config-daemon-9jz9g openshift-multus/multus-additional-cni-plugins-fxswv openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-source-55646444c4-trplf openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-image-registry/node-ca-gp27l openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-network-diagnostics/network-check-target-xd92c openshift-network-node-identity/network-node-identity-vrzqb openshift-dns/node-resolver-tshlv openshift-multus/network-metrics-daemon-bp9sq]\\\\nF1011 02:47:18.022478 6599 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:43Z\\\",\\\"message\\\":\\\" model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:192.168.126.11:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {b21188fe-5483-4717-afe6-20a41a40b91a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:43.668296 6925 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9\\\\nF1011 02:47:43.668332 6925 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has 
stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses
\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.606225 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0531d659-6c56-496b-bd96-5bf9c3abdbdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261344b70969642c0d021b5f2b786d710391c5114aa756651d5cf7a436514f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.625306 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.639657 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.659438 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.670640 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:44Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.688596 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.688659 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.688673 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.688694 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.688707 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.792014 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.792062 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.792079 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.792104 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.792121 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.795148 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:44 crc kubenswrapper[4953]: E1011 02:47:44.795291 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.895722 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.895773 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.895791 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.895817 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.895836 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.998737 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.998785 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.998802 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.998828 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:44 crc kubenswrapper[4953]: I1011 02:47:44.998845 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:44Z","lastTransitionTime":"2025-10-11T02:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.101800 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.101850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.101867 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.101891 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.101908 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.204789 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.205102 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.205126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.205151 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.205170 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.308549 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.308663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.308685 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.308716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.308735 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.388353 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/3.log" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.403177 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:47:45 crc kubenswrapper[4953]: E1011 02:47:45.403567 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.412557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.412650 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.412668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.412696 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.412719 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.429252 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.453686 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.479540 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.502567 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.516686 4953 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.516834 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.516885 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.516947 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.516979 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.521551 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.546535 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.567978 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.588518 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.606092 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0531d659-6c56-496b-bd96-5bf9c3abdbdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261344b70969642c0d021b5f2b786d710391c5114aa756651d5cf7a436514f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.621186 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.621253 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.621271 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.621299 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.621319 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.627826 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.656397 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.673585 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.699038 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.724780 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.724834 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc 
kubenswrapper[4953]: I1011 02:47:45.724849 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.724875 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.724891 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.732373 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a444
3e88d2a9ad7b86b63489e036\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:43Z\\\",\\\"message\\\":\\\" model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:192.168.126.11:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {b21188fe-5483-4717-afe6-20a41a40b91a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:43.668296 6925 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9\\\\nF1011 02:47:43.668332 6925 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.755438 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.778121 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.795054 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.795104 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:45 crc kubenswrapper[4953]: E1011 02:47:45.795306 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.795411 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:45 crc kubenswrapper[4953]: E1011 02:47:45.795706 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:45 crc kubenswrapper[4953]: E1011 02:47:45.795920 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.803312 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.823941 4953 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:45Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.828378 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.828586 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.828782 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.828939 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.829070 4953 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.933149 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.933246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.933306 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.933344 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:45 crc kubenswrapper[4953]: I1011 02:47:45.933365 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:45Z","lastTransitionTime":"2025-10-11T02:47:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.036685 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.036751 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.036770 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.036795 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.036815 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.140975 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.141024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.141036 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.141057 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.141071 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.244475 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.244550 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.244568 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.244593 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.244643 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.347656 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.347751 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.347773 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.347801 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.347822 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.451456 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.451556 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.451583 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.451681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.451714 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.555539 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.555645 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.555665 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.555692 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.555711 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.659589 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.659726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.659750 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.659792 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.659822 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.763647 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.764068 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.764378 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.764851 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.765065 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.795463 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:46 crc kubenswrapper[4953]: E1011 02:47:46.795921 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.868933 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.869496 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.869686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.869889 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.870035 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.973806 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.973868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.973884 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.973911 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:46 crc kubenswrapper[4953]: I1011 02:47:46.973928 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:46Z","lastTransitionTime":"2025-10-11T02:47:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.077143 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.077192 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.077209 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.077234 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.077251 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.185044 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.185132 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.185158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.185192 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.185216 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.289730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.289825 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.289861 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.289896 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.289927 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.393186 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.393259 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.393283 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.393314 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.393340 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.458179 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.458251 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.458290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.458321 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.458341 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.481050 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:47Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.487796 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.487914 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.487935 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.487962 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.487981 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.515589 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:47Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.522517 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.522561 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.522570 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.522586 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.522600 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.542135 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:47Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.549119 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.549162 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.549172 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.549188 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.549201 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.564159 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:47Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.568240 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.568271 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.568283 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.568301 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.568316 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.585732 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:47Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.585997 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.588669 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
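[editor's note] Every node-status patch in the retry loop above is rejected for the same reason: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate whose NotAfter (2025-08-24T17:21:41Z) is long past the node's current time (2025-10-11T02:47:47Z). A minimal standalone Go sketch for confirming this from the host follows; only the address is taken from the log, the file name and output format are illustrative.

// certcheck.go - diagnostic sketch, not part of the kubelet or the webhook.
// Dials the webhook endpoint from the log and prints the served
// certificate's validity window against the current clock.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing call in the log:
	// Post "https://127.0.0.1:9743/node?timeout=10s"
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the cert without trusting it
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	now := time.Now().UTC()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\nNow:       %s\nExpired:   %v\n",
		certs[0].NotBefore, certs[0].NotAfter, now, now.After(certs[0].NotAfter))
}

If Expired prints true, the fix is rotating the webhook's serving certificate; on a CRC-style single-node cluster that has been powered off past its certificate lifetime, this typically means waiting for (or triggering) the cluster's certificate regeneration on first boot.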
event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.588729 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.588749 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.588790 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.588808 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.693056 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.693474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.693708 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.693922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.694068 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.794821 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.794874 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.795079 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.795761 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.795466 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:47 crc kubenswrapper[4953]: E1011 02:47:47.796040 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.797996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.798047 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.798065 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.798091 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.798110 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.901420 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.901487 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.901499 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.901523 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:47 crc kubenswrapper[4953]: I1011 02:47:47.901541 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:47Z","lastTransitionTime":"2025-10-11T02:47:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.005182 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.005250 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.005275 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.005304 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.005325 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.108803 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.108877 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.108894 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.108920 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.108943 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.212656 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.212741 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.212765 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.212796 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.212818 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.316066 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.316151 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.316175 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.316205 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.316224 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.419167 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.419239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.419256 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.419282 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.419299 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.522233 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.522365 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.522383 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.522409 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.522429 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.626498 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.626587 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.626651 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.626686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.626708 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.730352 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.730428 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.730446 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.730472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.730491 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.794361 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:48 crc kubenswrapper[4953]: E1011 02:47:48.794550 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.833875 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.833987 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.834007 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.834035 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.834052 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.938312 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.938440 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.938468 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.938499 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:48 crc kubenswrapper[4953]: I1011 02:47:48.938519 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:48Z","lastTransitionTime":"2025-10-11T02:47:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.041537 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.041642 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.041662 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.041686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.041705 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.145897 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.145964 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.145989 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.146016 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.146043 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.250094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.250183 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.250211 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.250244 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.250270 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.354731 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.354868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.354892 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.354934 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.355046 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.460969 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.461072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.461098 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.461135 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.461158 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.565461 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.565510 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.565526 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.565553 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.565570 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.669264 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.669333 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.669351 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.669378 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.669397 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.773246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.773325 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.773340 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.773360 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.773374 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.794967 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:49 crc kubenswrapper[4953]: E1011 02:47:49.795187 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.795512 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:49 crc kubenswrapper[4953]: E1011 02:47:49.795708 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.795940 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:49 crc kubenswrapper[4953]: E1011 02:47:49.796296 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.821718 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea
2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\
\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.857637 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a444
3e88d2a9ad7b86b63489e036\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:43Z\\\",\\\"message\\\":\\\" model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:192.168.126.11:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {b21188fe-5483-4717-afe6-20a41a40b91a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:43.668296 6925 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9\\\\nF1011 02:47:43.668332 6925 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.874666 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0531d659-6c56-496b-bd96-5bf9c3abdbdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261344b70969642c0d021b5f2b786d710391c5114aa756651d5cf7a436514f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3
f353a5fd932e28a1a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.877290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.877339 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.877381 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.877403 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.877415 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.892324 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.912642 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.926535 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.944172 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.959459 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.973354 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.980222 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.980486 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.980734 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.980918 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 
02:47:49.981090 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:49Z","lastTransitionTime":"2025-10-11T02:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:49 crc kubenswrapper[4953]: I1011 02:47:49.989385 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:49Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.005343 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.024013 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.042330 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.066202 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085274 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085317 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085333 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085354 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085369 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.085792 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.109928 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.131755 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.150963 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:50Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.188843 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.188908 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.188920 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.188944 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.188962 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.292273 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.292335 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.292348 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.292375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.292389 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.401022 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.401126 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.401145 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.401210 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.401230 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.504792 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.504852 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.504868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.504893 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.504909 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.608450 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.608529 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.608547 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.608574 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.608685 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.712476 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.712559 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.712579 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.712647 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.712677 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.794821 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:50 crc kubenswrapper[4953]: E1011 02:47:50.795567 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.815795 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.815852 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.815872 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.815897 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.815916 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.919371 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.919821 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.920023 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.920174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:50 crc kubenswrapper[4953]: I1011 02:47:50.920297 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:50Z","lastTransitionTime":"2025-10-11T02:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.024717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.024805 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.024832 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.024868 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.024891 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.128725 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.128814 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.128836 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.128869 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.128897 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.232453 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.232542 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.232568 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.232637 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.232664 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.336742 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.337224 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.337375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.337525 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.337745 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.442028 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.442132 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.442157 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.442191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.442215 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.546507 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.546587 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.546639 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.546674 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.546694 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.651302 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.651391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.651410 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.651441 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.651463 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.756174 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.756266 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.756287 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.756318 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.756338 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.795088 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.795204 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.795137 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:51 crc kubenswrapper[4953]: E1011 02:47:51.795360 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:47:51 crc kubenswrapper[4953]: E1011 02:47:51.795548 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:51 crc kubenswrapper[4953]: E1011 02:47:51.795833 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.860094 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.860148 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.860166 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.860193 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.860212 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.964326 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.964401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.964428 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.964458 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:51 crc kubenswrapper[4953]: I1011 02:47:51.964482 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:51Z","lastTransitionTime":"2025-10-11T02:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.067202 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.067270 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.067297 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.067329 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.067358 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.170552 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.170626 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.170638 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.170661 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.170678 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.273998 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.274067 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.274079 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.274104 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.274118 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.377479 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.377592 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.377641 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.377668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.377685 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.481732 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.481804 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.481824 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.481850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.481903 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.585416 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.585484 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.585500 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.585531 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.585551 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.689155 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.689213 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.689231 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.689257 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.689274 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794506 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794586 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794635 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794702 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794737 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:52 crc kubenswrapper[4953]: E1011 02:47:52.795008 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.794741 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.899407 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.899477 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.899502 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.899529 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:52 crc kubenswrapper[4953]: I1011 02:47:52.899547 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:52Z","lastTransitionTime":"2025-10-11T02:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.013778 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.013887 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.013904 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.013928 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.013943 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.117571 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.118135 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.118303 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.118440 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.118575 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.223123 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.223209 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.223233 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.223269 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.223289 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.327402 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.327486 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.327502 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.327532 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.327558 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.431719 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.431793 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.431814 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.431842 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.431859 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.439770 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.440011 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.439962458 +0000 UTC m=+148.373050142 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.535546 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.535715 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.535776 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.535840 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.535860 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.541293 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.541443 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.541506 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.541565 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.541573 4953 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.541713 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.541681088 +0000 UTC m=+148.474768762 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.541797 4953 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.541906 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.541874583 +0000 UTC m=+148.474962267 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542024 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542073 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542093 4953 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542120 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542176 4953 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542205 4953 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542236 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.542208921 +0000 UTC m=+148.475296575 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.542331 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.542287653 +0000 UTC m=+148.475375327 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.641688 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.641784 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.641805 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.641832 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.641854 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.745502 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.745579 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.745650 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.745681 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.745700 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.794835 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.794982 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.795242 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.795319 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.795688 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:53 crc kubenswrapper[4953]: E1011 02:47:53.796012 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.849946 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.850001 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.850013 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.850034 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.850047 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.953096 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.953158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.953173 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.953198 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:53 crc kubenswrapper[4953]: I1011 02:47:53.953221 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:53Z","lastTransitionTime":"2025-10-11T02:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.057061 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.057133 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.057154 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.057184 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.057204 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.161266 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.161401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.161452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.161492 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.161538 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.264973 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.265036 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.265054 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.265082 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.265100 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.369177 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.369242 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.369261 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.369289 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.369316 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.472815 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.472890 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.472909 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.472934 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.472953 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.576146 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.576246 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.576258 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.576283 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.576304 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.681476 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.681559 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.681582 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.681657 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.681696 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.785055 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.785124 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.785149 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.785178 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.785196 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.795009 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:54 crc kubenswrapper[4953]: E1011 02:47:54.795288 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.889159 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.889237 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.889261 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.889291 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.889315 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.992979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.993063 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.993097 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.993131 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:54 crc kubenswrapper[4953]: I1011 02:47:54.993155 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:54Z","lastTransitionTime":"2025-10-11T02:47:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.096383 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.096457 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.096479 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.096511 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.096537 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.200115 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.200198 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.200229 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.200265 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.200298 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.304158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.304239 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.304267 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.304306 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.304333 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.407720 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.407769 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.407784 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.407804 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.407816 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.511714 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.511780 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.511800 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.511828 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.511847 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.616182 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.616243 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.616262 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.616289 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.616307 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.721116 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.721170 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.721181 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.721204 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.721214 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.794664 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.794731 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.794903 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:55 crc kubenswrapper[4953]: E1011 02:47:55.795045 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:55 crc kubenswrapper[4953]: E1011 02:47:55.795335 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:55 crc kubenswrapper[4953]: E1011 02:47:55.795434 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.824979 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.825050 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.825072 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.825105 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.825128 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.928413 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.928455 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.928466 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.928485 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:55 crc kubenswrapper[4953]: I1011 02:47:55.928496 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:55Z","lastTransitionTime":"2025-10-11T02:47:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.032352 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.032421 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.032434 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.032455 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.032469 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.135797 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.135835 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.135845 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.135861 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.135873 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.239204 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.239268 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.239282 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.239305 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.239321 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.341864 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.341912 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.341922 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.341939 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.341956 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.445201 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.445300 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.445331 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.445373 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.445394 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.549869 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.549950 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.549967 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.550002 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.550022 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.653163 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.653275 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.653294 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.653321 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.653342 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.758088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.758176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.758198 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.758232 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.758252 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.795161 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:47:56 crc kubenswrapper[4953]: E1011 02:47:56.795385 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.862018 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.862091 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.862110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.862140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.862159 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.966088 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.966162 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.966179 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.966214 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:56 crc kubenswrapper[4953]: I1011 02:47:56.966232 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:56Z","lastTransitionTime":"2025-10-11T02:47:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.069516 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.070047 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.070071 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.070106 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.070126 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.173650 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.173730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.173753 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.173781 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.173801 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.277181 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.277250 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.277274 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.277309 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.277331 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.381536 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.381659 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.381688 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.381723 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.381745 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.485317 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.485383 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.485401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.485426 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.485446 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.590074 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.590138 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.590158 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.590184 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.590205 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.693575 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.693668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.693686 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.693712 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.693730 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.744730 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.744785 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.744801 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.744825 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.744847 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.769313 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:57Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.776278 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.776347 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.776375 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.776407 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.776432 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.794835 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.795269 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.795462 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.795763 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.795879 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.796184 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.803948 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:57Z is after 2025-08-24T17:21:41Z"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.812413 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.812474 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.812494 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.812521 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.812540 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.833874 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.839771 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.840059 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.840290 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.840530 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.840813 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.862304 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.870160 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.870218 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.870238 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.870265 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.870285 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.891181 4953 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0371f094-ffaa-4075-95ad-f84ddb50698e\\\",\\\"systemUUID\\\":\\\"af776600-3675-4e95-bb2d-3199a948d066\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:57Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:57 crc kubenswrapper[4953]: E1011 02:47:57.891443 4953 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.894598 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.894671 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.894687 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.894712 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.894731 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.998052 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.998390 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.998528 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.998716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:57 crc kubenswrapper[4953]: I1011 02:47:57.998869 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:57Z","lastTransitionTime":"2025-10-11T02:47:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.102024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.102085 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.102103 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.102131 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.102152 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.205505 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.205580 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.205632 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.205664 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.205697 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.309663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.309740 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.309758 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.309792 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.309813 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.413706 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.413803 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.413820 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.413848 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.413870 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.517031 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.517092 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.517109 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.517133 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.517154 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.620472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.620541 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.620560 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.620597 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.620659 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.724903 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.724978 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.724996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.725024 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.725045 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.794774 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:47:58 crc kubenswrapper[4953]: E1011 02:47:58.795544 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.829789 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.829866 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.829885 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.829916 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.829937 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.933739 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.933844 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.933864 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.933895 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:58 crc kubenswrapper[4953]: I1011 02:47:58.933919 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:58Z","lastTransitionTime":"2025-10-11T02:47:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.036996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.037093 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.037113 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.037139 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.037160 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.140905 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.140961 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.140972 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.140996 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.141012 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.244747 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.244830 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.244850 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.244880 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.244899 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.348423 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.348524 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.348542 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.348573 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.348598 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.453037 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.453128 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.453155 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.453194 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.453219 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.556558 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.556690 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.556709 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.556740 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.556768 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.660110 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.660167 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.660180 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.660201 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.660213 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.763623 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.763685 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.763697 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.763717 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.763730 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.795467 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.795505 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.795623 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:47:59 crc kubenswrapper[4953]: E1011 02:47:59.795665 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:47:59 crc kubenswrapper[4953]: E1011 02:47:59.795924 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:47:59 crc kubenswrapper[4953]: E1011 02:47:59.796060 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.796424 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:47:59 crc kubenswrapper[4953]: E1011 02:47:59.796858 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.814048 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.819349 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b54e00d7-d1b9-4141-a98e-49ab93882a26\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c62648237e863b32516a3934d6929119ce6c686b70308af083b6ee0c071190d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61a5b6df79bc18e988060c7ec7e61314181c2b858bda531242d99ed212f55e25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e947125ef66d8000c08d5e2ec05ab34f85a969ff66d9192ea6d6259df7f52052\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3357d2216257f733f0539ae90298d217f52293fc225836b462f11a55bfd98c6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0c84919f0e14d51c900b2f2b3e896794422039931861ca17050e084ce4b49de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1011 02:46:43.742810 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1011 02:46:43.744153 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2852012334/tls.crt::/tmp/serving-cert-2852012334/tls.key\\\\\\\"\\\\nI1011 02:46:49.549120 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1011 02:46:49.553717 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1011 02:46:49.553748 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1011 02:46:49.553792 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1011 02:46:49.553803 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1011 02:46:49.570732 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1011 02:46:49.570799 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1011 02:46:49.570907 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570919 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1011 02:46:49.570930 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1011 02:46:49.570944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1011 02:46:49.570951 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1011 02:46:49.570957 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1011 02:46:49.573122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://72262886b74deb4b95365bfe2d80b8e1a67407103615958afd8ee4ea03a284b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c9cdc98709fa1946fecfa52869ab07779fdddd2e5a75f1524b36eca7596d4b0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.835471 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.856542 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t8zfg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5a114089-658e-442c-b755-9ca9b127f368\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:41Z\\\",\\\"message\\\":\\\"2025-10-11T02:46:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4\\\\n2025-10-11T02:46:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_1fe1485b-3928-4140-8e29-8f90a856d0a4 to /host/opt/cni/bin/\\\\n2025-10-11T02:46:56Z [verbose] multus-daemon started\\\\n2025-10-11T02:46:56Z [verbose] Readiness Indicator file check\\\\n2025-10-11T02:47:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwtzr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t8zfg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.867236 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.867454 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.867670 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.867877 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.868045 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.871428 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a55d5e63-14a1-4d53-be84-21dce9f0c53d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://efb6f76d3651fc3e592f4c34897066452e642067f06a63fe273ed3d1278539cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rh2dx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-9jz9g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.886721 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"329460ba-d6c9-4774-b8d3-354e4406575c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2btjm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bp9sq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.906680 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://28b05ff1206967ef633e2afda2f9ce5aa2dffe004c465a069ffea8420f9bfbac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.926581 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.950302 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1356fafe-a703-47a2-8d51-f34303e06ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37066a0de8dede779318bd54f9839d159ced048708749b3366084d574dc8826e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1137d0f33ddebdca43654807c01b82b09abd38e51283f064ac79f3a706488f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b5m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:47:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-f7nz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.972299 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.972377 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.972395 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.972417 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.972461 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:47:59Z","lastTransitionTime":"2025-10-11T02:47:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.976584 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f390367a-136d-4992-a5a8-75d12ae2a94a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-11T02:47:43Z\\\",\\\"message\\\":\\\" model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:default/kubernetes]} name:Service_default/kubernetes_TCP_node_switch_crc options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.1:443:192.168.126.11:6443]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {b21188fe-5483-4717-afe6-20a41a40b91a}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1011 02:47:43.668296 6925 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9\\\\nF1011 02:47:43.668332 6925 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-11T02:47:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmskm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-7277g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:47:59 crc kubenswrapper[4953]: I1011 02:47:59.995665 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0531d659-6c56-496b-bd96-5bf9c3abdbdd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://261344b70969642c0d021b5f2b786d710391c5114aa756651d5cf7a436514f58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://378a59dee82adc907
99136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://378a59dee82adc90799136fb9b759eefbb85bc84cc3fe3f353a5fd932e28a1a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:47:59Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.017077 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.034217 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf01b3fcded4b31f6c9c03e03d439733427d7d1a0eaafa8cb0f5f3622dcc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.050532 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tshlv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd73dbbf-9c04-4826-9bbd-56341abba133\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43f502e2d2d8faaa9d21cd5480ee973d17b0ffb228564f1c66974fc2aa29d472\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qc9t9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tshlv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.073768 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-fxswv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e359a42a-4046-4856-8936-b570d11fb061\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ba98c377bf6051f9db8e0604ea6adec7d79d9776605f8da5872f5cfbb8222e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:47:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37295c5deab28808265750e3cdaa801096d06e2894c9fc5bfb640405b0eefaad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d39766ddc3606ef7c2754d3f1e7ee1268ae5e8d0561bbd2756e7c46444edd5af\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a955dc7697e4c60990ea754b39613d0d08de34bbc551a82313d1f8ca74e3a9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://598890a8f80afbb628f4f4386d5e60bd513f6a8e13ee3af5c25c6362e17aa4c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d436885385025c27299ed4ca9bb0ef654da4a14db23d062cfb84ff1187113630\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b2298a8dc420d6a81d236ad5417e5af8d10dcb5d131b4aad38bd4e5277fb289\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blmpp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-fxswv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.075749 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.075823 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc 
kubenswrapper[4953]: I1011 02:48:00.075836 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.075859 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.075904 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.094269 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc57884-2952-4631-95c0-4743da966d54\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3de2f4468bf9c3e75cc184e0d402232d1304158910c17f80b6db9f21fd4eae6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://abf7f0a632eadabe7e6fcb948499c9b2b026df1a0f298b15e75cee8a87ba74bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://55296cb6c77152de5acb0a03ab098c0aac82e7fdfc4dd7c4751aacc2c1d61377\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6457bf8069975d6b8465b584b5ff0c04e4fa443db1963b52b23eeb2a9cd214e1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.114917 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f9f5574-d665-4052-80b0-42da159ece4c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:47:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d8d0d348130e041a5702945f3c51811bbdfe92ce01a139d6b3be68438b6a3d0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f09ccaf47508b7774910efd611a5ae2acefd13c4531ec7860430afbd37d5621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6172b0e0f81bfe02ba60b3b904289f6a7637ebc90a90551d8c262f64151b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c343de7fe7486cd3944a78b7a57f58932528eb15033363a422eaed2a0e0b9ef\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-11T02:46:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-11T02:46:30Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:29Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.134047 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ca682eea8524d07fdcda3158e8ea2a0c460e19b032d60964bd1591ba48c259c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://baadcf4bccf8ae43cec1a599bc03fb2fe455b460cc748d28cb7fd7f480068c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.155329 4953 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-gp27l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c909e377-5fa6-4647-b368-0e5436d9e407\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-11T02:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbcab115fc0fc22dce34c057169a33098073db592eef86d5991a98f7a4365831\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-11T02:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kzkb5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-11T02:46:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-gp27l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-11T02:48:00Z is after 2025-08-24T17:21:41Z" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.179391 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.179441 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.179452 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.179472 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.179483 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.283929 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.284000 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.284017 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.284049 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.284080 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.388899 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.388995 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.389012 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.389039 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.389060 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.494911 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.494992 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.495010 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.495040 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.495058 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.599131 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.599208 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.599227 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.599261 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.599279 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.703777 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.703848 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.703864 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.703894 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.703912 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.794852 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:00 crc kubenswrapper[4953]: E1011 02:48:00.795078 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.807182 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.807243 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.807260 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.807295 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.807317 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.910747 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.910826 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.910846 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.910878 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:00 crc kubenswrapper[4953]: I1011 02:48:00.910901 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:00Z","lastTransitionTime":"2025-10-11T02:48:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.014684 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.014804 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.014829 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.014860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.014884 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.118545 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.118668 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.118688 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.118716 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.118736 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.222769 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.222838 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.222860 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.222893 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.222917 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.326428 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.326486 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.326503 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.326530 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.326548 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.430191 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.430245 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.430262 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.430288 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.430306 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.533054 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.533120 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.533140 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.533165 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.533184 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.636128 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.636156 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.636163 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.636176 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.636187 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.739498 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.739576 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.739592 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.739658 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.739679 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.795135 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.795179 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.795226 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:01 crc kubenswrapper[4953]: E1011 02:48:01.795433 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:01 crc kubenswrapper[4953]: E1011 02:48:01.795546 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:01 crc kubenswrapper[4953]: E1011 02:48:01.795879 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.842559 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.842663 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.842694 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.842726 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.842750 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.946832 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.946900 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.946916 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.946942 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 11 02:48:01 crc kubenswrapper[4953]: I1011 02:48:01.946962 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:01Z","lastTransitionTime":"2025-10-11T02:48:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 11 02:48:02 crc kubenswrapper[4953]: I1011 02:48:02.795338 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:02 crc kubenswrapper[4953]: E1011 02:48:02.795563 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:48:03 crc kubenswrapper[4953]: I1011 02:48:03.795295 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:03 crc kubenswrapper[4953]: I1011 02:48:03.795357 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:48:03 crc kubenswrapper[4953]: I1011 02:48:03.795453 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:48:03 crc kubenswrapper[4953]: E1011 02:48:03.795569 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:48:03 crc kubenswrapper[4953]: E1011 02:48:03.795747 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:48:03 crc kubenswrapper[4953]: E1011 02:48:03.795868 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:48:04 crc kubenswrapper[4953]: I1011 02:48:04.794741 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:04 crc kubenswrapper[4953]: E1011 02:48:04.794977 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 11 02:48:05 crc kubenswrapper[4953]: I1011 02:48:05.794927 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:48:05 crc kubenswrapper[4953]: I1011 02:48:05.794947 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq"
Oct 11 02:48:05 crc kubenswrapper[4953]: I1011 02:48:05.794991 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:05 crc kubenswrapper[4953]: E1011 02:48:05.795080 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c"
Oct 11 02:48:05 crc kubenswrapper[4953]: E1011 02:48:05.795184 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 11 02:48:05 crc kubenswrapper[4953]: E1011 02:48:05.795544 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 11 02:48:06 crc kubenswrapper[4953]: I1011 02:48:06.794781 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:06 crc kubenswrapper[4953]: E1011 02:48:06.795000 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.551378 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.551455 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.551478 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.551512 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.551530 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:07Z","lastTransitionTime":"2025-10-11T02:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.654913 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.654949 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.654958 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.654974 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.654984 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:07Z","lastTransitionTime":"2025-10-11T02:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.759340 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.759401 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.759414 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.759431 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.759443 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:07Z","lastTransitionTime":"2025-10-11T02:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.795043 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.795220 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.795459 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:07 crc kubenswrapper[4953]: E1011 02:48:07.795744 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:07 crc kubenswrapper[4953]: E1011 02:48:07.795928 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:07 crc kubenswrapper[4953]: E1011 02:48:07.796132 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.863473 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.863547 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.863566 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.863593 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.863645 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:07Z","lastTransitionTime":"2025-10-11T02:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.968688 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.968766 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.968790 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.968825 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:07 crc kubenswrapper[4953]: I1011 02:48:07.968848 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:07Z","lastTransitionTime":"2025-10-11T02:48:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.072486 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.072557 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.072575 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.072634 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.072656 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:08Z","lastTransitionTime":"2025-10-11T02:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.176746 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.176824 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.176843 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.176873 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.176895 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:08Z","lastTransitionTime":"2025-10-11T02:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.217512 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.217578 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.217598 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.217689 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.217719 4953 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-11T02:48:08Z","lastTransitionTime":"2025-10-11T02:48:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.297637 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn"] Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.298304 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.301934 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.302528 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.303082 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.303975 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.331724 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.331792 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.331846 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-serving-cert\") pod 
\"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.331986 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.332027 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.387061 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=9.387028150999999 podStartE2EDuration="9.387028151s" podCreationTimestamp="2025-10-11 02:47:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.359770534 +0000 UTC m=+99.292858208" watchObservedRunningTime="2025-10-11 02:48:08.387028151 +0000 UTC m=+99.320115825" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.411897 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.411860175 podStartE2EDuration="1m18.411860175s" podCreationTimestamp="2025-10-11 02:46:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.388219131 +0000 UTC m=+99.321306805" watchObservedRunningTime="2025-10-11 02:48:08.411860175 +0000 UTC m=+99.344947859" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.412166 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.412156353 podStartE2EDuration="51.412156353s" podCreationTimestamp="2025-10-11 02:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.41163728 +0000 UTC m=+99.344724964" watchObservedRunningTime="2025-10-11 02:48:08.412156353 +0000 UTC m=+99.345244027" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433391 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433468 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-cvo-updatepayloads\") pod 
\"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433530 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433565 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433669 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.433870 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.434110 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.435502 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-service-ca\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.447326 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.471382 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-gp27l" podStartSLOduration=77.471352086 podStartE2EDuration="1m17.471352086s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-11 02:48:08.470526535 +0000 UTC m=+99.403614219" watchObservedRunningTime="2025-10-11 02:48:08.471352086 +0000 UTC m=+99.404439770" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.471581 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-b92sn\" (UID: \"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.526385 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.526349042 podStartE2EDuration="1m19.526349042s" podCreationTimestamp="2025-10-11 02:46:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.499132666 +0000 UTC m=+99.432220330" watchObservedRunningTime="2025-10-11 02:48:08.526349042 +0000 UTC m=+99.459436726" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.551374 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-t8zfg" podStartSLOduration=77.55133205 podStartE2EDuration="1m17.55133205s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.551050743 +0000 UTC m=+99.484138427" watchObservedRunningTime="2025-10-11 02:48:08.55133205 +0000 UTC m=+99.484419744" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.572884 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podStartSLOduration=77.57284069 podStartE2EDuration="1m17.57284069s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.571483905 +0000 UTC m=+99.504571589" watchObservedRunningTime="2025-10-11 02:48:08.57284069 +0000 UTC m=+99.505928374" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.626564 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.692492 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-f7nz9" podStartSLOduration=76.692464887 podStartE2EDuration="1m16.692464887s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.672478516 +0000 UTC m=+99.605566200" watchObservedRunningTime="2025-10-11 02:48:08.692464887 +0000 UTC m=+99.625552531" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.709922 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=25.709896823 podStartE2EDuration="25.709896823s" podCreationTimestamp="2025-10-11 02:47:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.709315058 +0000 UTC m=+99.642402702" watchObservedRunningTime="2025-10-11 02:48:08.709896823 +0000 UTC m=+99.642984477" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.764159 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-tshlv" podStartSLOduration=77.764125209 podStartE2EDuration="1m17.764125209s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.763061182 +0000 UTC m=+99.696148856" watchObservedRunningTime="2025-10-11 02:48:08.764125209 +0000 UTC m=+99.697212893" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.791238 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-fxswv" podStartSLOduration=77.791212731 podStartE2EDuration="1m17.791212731s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:08.79039451 +0000 UTC m=+99.723482174" watchObservedRunningTime="2025-10-11 02:48:08.791212731 +0000 UTC m=+99.724300415" Oct 11 02:48:08 crc kubenswrapper[4953]: I1011 02:48:08.795001 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:08 crc kubenswrapper[4953]: E1011 02:48:08.795129 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:09 crc kubenswrapper[4953]: I1011 02:48:09.506585 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" event={"ID":"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74","Type":"ContainerStarted","Data":"b15470b806fd06b0bec1e4cb48c3afb613cc08f7f7c7b4b75c7971da4493c560"} Oct 11 02:48:09 crc kubenswrapper[4953]: I1011 02:48:09.506666 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" event={"ID":"d777fca1-734e-4d3c-aaa9-f9b7a9d0bd74","Type":"ContainerStarted","Data":"9f9535d9ea96fb66a1529485f2800d18055a771f57a7888fb4ea9bb58da2f0f3"} Oct 11 02:48:09 crc kubenswrapper[4953]: I1011 02:48:09.795328 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:09 crc kubenswrapper[4953]: I1011 02:48:09.795431 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:09 crc kubenswrapper[4953]: E1011 02:48:09.799562 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:09 crc kubenswrapper[4953]: E1011 02:48:09.799858 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:09 crc kubenswrapper[4953]: I1011 02:48:09.800187 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:09 crc kubenswrapper[4953]: E1011 02:48:09.800310 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:10 crc kubenswrapper[4953]: I1011 02:48:10.255575 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:10 crc kubenswrapper[4953]: E1011 02:48:10.255875 4953 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:48:10 crc kubenswrapper[4953]: E1011 02:48:10.256195 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs podName:329460ba-d6c9-4774-b8d3-354e4406575c nodeName:}" failed. No retries permitted until 2025-10-11 02:49:14.256146952 +0000 UTC m=+165.189234626 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs") pod "network-metrics-daemon-bp9sq" (UID: "329460ba-d6c9-4774-b8d3-354e4406575c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 11 02:48:10 crc kubenswrapper[4953]: I1011 02:48:10.794631 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:10 crc kubenswrapper[4953]: E1011 02:48:10.794784 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:11 crc kubenswrapper[4953]: I1011 02:48:11.794683 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:11 crc kubenswrapper[4953]: I1011 02:48:11.794797 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:11 crc kubenswrapper[4953]: E1011 02:48:11.794839 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:11 crc kubenswrapper[4953]: I1011 02:48:11.794797 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:11 crc kubenswrapper[4953]: E1011 02:48:11.794997 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:11 crc kubenswrapper[4953]: E1011 02:48:11.795174 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:12 crc kubenswrapper[4953]: I1011 02:48:12.794870 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:12 crc kubenswrapper[4953]: E1011 02:48:12.795301 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:13 crc kubenswrapper[4953]: I1011 02:48:13.794774 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:13 crc kubenswrapper[4953]: I1011 02:48:13.794877 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:13 crc kubenswrapper[4953]: I1011 02:48:13.794994 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:13 crc kubenswrapper[4953]: E1011 02:48:13.796668 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:13 crc kubenswrapper[4953]: E1011 02:48:13.796873 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:13 crc kubenswrapper[4953]: E1011 02:48:13.797118 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:14 crc kubenswrapper[4953]: I1011 02:48:14.795094 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:14 crc kubenswrapper[4953]: E1011 02:48:14.795341 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:14 crc kubenswrapper[4953]: I1011 02:48:14.796460 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:48:14 crc kubenswrapper[4953]: E1011 02:48:14.796815 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-7277g_openshift-ovn-kubernetes(f390367a-136d-4992-a5a8-75d12ae2a94a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" Oct 11 02:48:15 crc kubenswrapper[4953]: I1011 02:48:15.795831 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:15 crc kubenswrapper[4953]: E1011 02:48:15.795996 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:15 crc kubenswrapper[4953]: I1011 02:48:15.796147 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:15 crc kubenswrapper[4953]: I1011 02:48:15.796159 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:15 crc kubenswrapper[4953]: E1011 02:48:15.796370 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:15 crc kubenswrapper[4953]: E1011 02:48:15.796557 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:16 crc kubenswrapper[4953]: I1011 02:48:16.794673 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:16 crc kubenswrapper[4953]: E1011 02:48:16.794816 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:17 crc kubenswrapper[4953]: I1011 02:48:17.800199 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:17 crc kubenswrapper[4953]: E1011 02:48:17.800435 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:17 crc kubenswrapper[4953]: I1011 02:48:17.803704 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:17 crc kubenswrapper[4953]: I1011 02:48:17.803716 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:17 crc kubenswrapper[4953]: E1011 02:48:17.803874 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:17 crc kubenswrapper[4953]: E1011 02:48:17.804061 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:18 crc kubenswrapper[4953]: I1011 02:48:18.795013 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:18 crc kubenswrapper[4953]: E1011 02:48:18.795222 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:19 crc kubenswrapper[4953]: I1011 02:48:19.795001 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:19 crc kubenswrapper[4953]: I1011 02:48:19.795062 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:19 crc kubenswrapper[4953]: E1011 02:48:19.796238 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:19 crc kubenswrapper[4953]: I1011 02:48:19.796267 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:19 crc kubenswrapper[4953]: E1011 02:48:19.796459 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:19 crc kubenswrapper[4953]: E1011 02:48:19.797738 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:20 crc kubenswrapper[4953]: I1011 02:48:20.795124 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:20 crc kubenswrapper[4953]: E1011 02:48:20.795361 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:21 crc kubenswrapper[4953]: I1011 02:48:21.794692 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:21 crc kubenswrapper[4953]: I1011 02:48:21.794831 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:21 crc kubenswrapper[4953]: I1011 02:48:21.794918 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:21 crc kubenswrapper[4953]: E1011 02:48:21.795143 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:21 crc kubenswrapper[4953]: E1011 02:48:21.795309 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:21 crc kubenswrapper[4953]: E1011 02:48:21.795597 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:22 crc kubenswrapper[4953]: I1011 02:48:22.795033 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:22 crc kubenswrapper[4953]: E1011 02:48:22.795248 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:23 crc kubenswrapper[4953]: I1011 02:48:23.795425 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:23 crc kubenswrapper[4953]: I1011 02:48:23.795477 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:23 crc kubenswrapper[4953]: I1011 02:48:23.795693 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:23 crc kubenswrapper[4953]: E1011 02:48:23.795748 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:23 crc kubenswrapper[4953]: E1011 02:48:23.796010 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:23 crc kubenswrapper[4953]: E1011 02:48:23.796321 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:24 crc kubenswrapper[4953]: I1011 02:48:24.795252 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:24 crc kubenswrapper[4953]: E1011 02:48:24.795422 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:25 crc kubenswrapper[4953]: I1011 02:48:25.794799 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:25 crc kubenswrapper[4953]: I1011 02:48:25.794793 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:25 crc kubenswrapper[4953]: I1011 02:48:25.794837 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:25 crc kubenswrapper[4953]: E1011 02:48:25.795026 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:25 crc kubenswrapper[4953]: E1011 02:48:25.795426 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:25 crc kubenswrapper[4953]: E1011 02:48:25.795563 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:26 crc kubenswrapper[4953]: I1011 02:48:26.795134 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:26 crc kubenswrapper[4953]: E1011 02:48:26.795384 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.584819 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/1.log" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.585756 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/0.log" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.585870 4953 generic.go:334] "Generic (PLEG): container finished" podID="5a114089-658e-442c-b755-9ca9b127f368" containerID="c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87" exitCode=1 Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.585951 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerDied","Data":"c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87"} Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.586261 4953 scope.go:117] "RemoveContainer" containerID="ed32fde000cd7336dccb1b19f9c8b9739e2be4c00f45b2bc7b2b1e24a2260f02" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.587224 4953 scope.go:117] "RemoveContainer" containerID="c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87" Oct 11 02:48:27 crc kubenswrapper[4953]: E1011 02:48:27.594081 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-t8zfg_openshift-multus(5a114089-658e-442c-b755-9ca9b127f368)\"" pod="openshift-multus/multus-t8zfg" podUID="5a114089-658e-442c-b755-9ca9b127f368" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.612691 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-b92sn" podStartSLOduration=96.612663762 podStartE2EDuration="1m36.612663762s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:09.535102554 +0000 UTC m=+100.468190198" watchObservedRunningTime="2025-10-11 02:48:27.612663762 +0000 UTC m=+118.545751416" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.794586 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.794744 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:27 crc kubenswrapper[4953]: E1011 02:48:27.794820 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.795292 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:27 crc kubenswrapper[4953]: E1011 02:48:27.795571 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:27 crc kubenswrapper[4953]: E1011 02:48:27.795521 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:27 crc kubenswrapper[4953]: I1011 02:48:27.796058 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.590862 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/1.log" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.593647 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/3.log" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.596996 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerStarted","Data":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.597539 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.731010 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podStartSLOduration=97.730973823 podStartE2EDuration="1m37.730973823s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:28.623271101 +0000 UTC m=+119.556358765" watchObservedRunningTime="2025-10-11 02:48:28.730973823 +0000 UTC m=+119.664061507" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.732464 4953 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-multus/network-metrics-daemon-bp9sq"] Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.732688 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:28 crc kubenswrapper[4953]: E1011 02:48:28.732864 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:28 crc kubenswrapper[4953]: I1011 02:48:28.794642 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:28 crc kubenswrapper[4953]: E1011 02:48:28.794841 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:29 crc kubenswrapper[4953]: E1011 02:48:29.729941 4953 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 11 02:48:29 crc kubenswrapper[4953]: I1011 02:48:29.794954 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:29 crc kubenswrapper[4953]: I1011 02:48:29.795020 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:29 crc kubenswrapper[4953]: E1011 02:48:29.795984 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:29 crc kubenswrapper[4953]: E1011 02:48:29.796122 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:29 crc kubenswrapper[4953]: E1011 02:48:29.908774 4953 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 11 02:48:30 crc kubenswrapper[4953]: I1011 02:48:30.795290 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:30 crc kubenswrapper[4953]: I1011 02:48:30.795303 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:30 crc kubenswrapper[4953]: E1011 02:48:30.796962 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:30 crc kubenswrapper[4953]: E1011 02:48:30.797221 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:31 crc kubenswrapper[4953]: I1011 02:48:31.795220 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:31 crc kubenswrapper[4953]: I1011 02:48:31.795231 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:31 crc kubenswrapper[4953]: E1011 02:48:31.795434 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:31 crc kubenswrapper[4953]: E1011 02:48:31.795561 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:32 crc kubenswrapper[4953]: I1011 02:48:32.794884 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:32 crc kubenswrapper[4953]: I1011 02:48:32.794907 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:32 crc kubenswrapper[4953]: E1011 02:48:32.795070 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:32 crc kubenswrapper[4953]: E1011 02:48:32.795219 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:33 crc kubenswrapper[4953]: I1011 02:48:33.794372 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:33 crc kubenswrapper[4953]: I1011 02:48:33.794448 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:33 crc kubenswrapper[4953]: E1011 02:48:33.794674 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:33 crc kubenswrapper[4953]: E1011 02:48:33.794811 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:34 crc kubenswrapper[4953]: I1011 02:48:34.795077 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:34 crc kubenswrapper[4953]: I1011 02:48:34.795130 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:34 crc kubenswrapper[4953]: E1011 02:48:34.795291 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:34 crc kubenswrapper[4953]: E1011 02:48:34.795451 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:34 crc kubenswrapper[4953]: E1011 02:48:34.911235 4953 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 11 02:48:35 crc kubenswrapper[4953]: I1011 02:48:35.795319 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:35 crc kubenswrapper[4953]: I1011 02:48:35.795432 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:35 crc kubenswrapper[4953]: E1011 02:48:35.795583 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:35 crc kubenswrapper[4953]: E1011 02:48:35.795691 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:36 crc kubenswrapper[4953]: I1011 02:48:36.794832 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:36 crc kubenswrapper[4953]: I1011 02:48:36.794903 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:36 crc kubenswrapper[4953]: E1011 02:48:36.796531 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:36 crc kubenswrapper[4953]: E1011 02:48:36.796740 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:37 crc kubenswrapper[4953]: I1011 02:48:37.795125 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:37 crc kubenswrapper[4953]: E1011 02:48:37.795306 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:37 crc kubenswrapper[4953]: I1011 02:48:37.795914 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:37 crc kubenswrapper[4953]: E1011 02:48:37.796318 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:38 crc kubenswrapper[4953]: I1011 02:48:38.794734 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:38 crc kubenswrapper[4953]: I1011 02:48:38.794735 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:38 crc kubenswrapper[4953]: E1011 02:48:38.794904 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:38 crc kubenswrapper[4953]: E1011 02:48:38.794971 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:39 crc kubenswrapper[4953]: I1011 02:48:39.796743 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:39 crc kubenswrapper[4953]: E1011 02:48:39.796866 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:39 crc kubenswrapper[4953]: I1011 02:48:39.797157 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:39 crc kubenswrapper[4953]: E1011 02:48:39.797217 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:39 crc kubenswrapper[4953]: E1011 02:48:39.912105 4953 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 11 02:48:40 crc kubenswrapper[4953]: I1011 02:48:40.795411 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:40 crc kubenswrapper[4953]: I1011 02:48:40.795411 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:40 crc kubenswrapper[4953]: E1011 02:48:40.795624 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:40 crc kubenswrapper[4953]: E1011 02:48:40.795751 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:41 crc kubenswrapper[4953]: I1011 02:48:41.795130 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:41 crc kubenswrapper[4953]: I1011 02:48:41.795230 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:41 crc kubenswrapper[4953]: E1011 02:48:41.795367 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:41 crc kubenswrapper[4953]: E1011 02:48:41.795671 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:42 crc kubenswrapper[4953]: I1011 02:48:42.795587 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:42 crc kubenswrapper[4953]: E1011 02:48:42.795827 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:42 crc kubenswrapper[4953]: I1011 02:48:42.795956 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:42 crc kubenswrapper[4953]: E1011 02:48:42.796214 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:42 crc kubenswrapper[4953]: I1011 02:48:42.798527 4953 scope.go:117] "RemoveContainer" containerID="c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87" Oct 11 02:48:43 crc kubenswrapper[4953]: I1011 02:48:43.661353 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/1.log" Oct 11 02:48:43 crc kubenswrapper[4953]: I1011 02:48:43.661993 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerStarted","Data":"8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313"} Oct 11 02:48:43 crc kubenswrapper[4953]: I1011 02:48:43.795449 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:43 crc kubenswrapper[4953]: E1011 02:48:43.795704 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 11 02:48:43 crc kubenswrapper[4953]: I1011 02:48:43.795954 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:43 crc kubenswrapper[4953]: E1011 02:48:43.796135 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 11 02:48:44 crc kubenswrapper[4953]: I1011 02:48:44.794960 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:44 crc kubenswrapper[4953]: I1011 02:48:44.795029 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:44 crc kubenswrapper[4953]: E1011 02:48:44.795478 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 11 02:48:44 crc kubenswrapper[4953]: E1011 02:48:44.795663 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bp9sq" podUID="329460ba-d6c9-4774-b8d3-354e4406575c" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.795247 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.795247 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.798690 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.799103 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.800495 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 11 02:48:45 crc kubenswrapper[4953]: I1011 02:48:45.800820 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 11 02:48:46 crc kubenswrapper[4953]: I1011 02:48:46.795226 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:48:46 crc kubenswrapper[4953]: I1011 02:48:46.795313 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:48:46 crc kubenswrapper[4953]: I1011 02:48:46.797978 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 11 02:48:46 crc kubenswrapper[4953]: I1011 02:48:46.798108 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 11 02:48:47 crc kubenswrapper[4953]: I1011 02:48:47.067976 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.208455 4953 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.253296 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.253883 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.257903 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.258443 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.259368 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5tdt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.260907 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.268405 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.268406 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.268642 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.269866 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.270076 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271263 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ttfpq"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271324 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271422 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271573 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271870 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271928 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.271968 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.272002 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.272344 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.272374 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.272577 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.272762 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.274067 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.274589 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.277244 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.277493 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.277703 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278311 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278368 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278407 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278594 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278768 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278831 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.278950 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.279262 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.279331 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7gjs6"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.279889 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.280564 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.282003 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.284244 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.284653 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9v76x"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.285213 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.289968 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.290204 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.290500 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.290566 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292249 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292308 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292544 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292772 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292866 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.292894 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.293357 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.293446 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.293839 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.294760 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.294995 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.295997 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.296496 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.296934 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.297366 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.302799 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.305074 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.309781 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.310098 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.310253 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.310459 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.310636 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.310804 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.311007 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.311161 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.311539 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.315624 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fg6dt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.316601 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.317038 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.317237 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.317498 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.324947 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.327822 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.328909 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.325930 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.326506 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.326693 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.327642 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.328362 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.328896 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.328909 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.328980 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329139 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329330 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329484 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329837 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329903 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.329960 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.330247 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.330654 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.343833 4953 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.344928 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.345438 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.345621 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.347281 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.347882 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.348889 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.351473 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-67499"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.352069 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.352229 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.352291 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.353252 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.353357 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.353412 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.353927 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.353997 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.354556 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355052 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbvgf\" (UniqueName: \"kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355089 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355106 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355128 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355144 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-encryption-config\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355162 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-trusted-ca\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355193 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj8m4\" (UniqueName: \"kubernetes.io/projected/d55f397e-a527-4b11-ad27-3f3b986d0985-kube-api-access-mj8m4\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355210 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355226 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355244 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-serving-cert\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355259 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d55f397e-a527-4b11-ad27-3f3b986d0985-serving-cert\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355275 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355291 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355316 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355333 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 
02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355347 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355362 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26wv7\" (UniqueName: \"kubernetes.io/projected/eef84f8c-6025-40be-8289-bd7b4c6e5a55-kube-api-access-26wv7\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355380 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-client\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355395 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92wgt\" (UniqueName: \"kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355417 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355433 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355448 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355462 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-encryption-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355476 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-config\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355494 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xlkl\" (UniqueName: \"kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355509 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f7rj\" (UniqueName: \"kubernetes.io/projected/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-kube-api-access-8f7rj\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355524 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355538 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355555 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355570 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-policies\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355586 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355603 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-node-pullsecrets\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355633 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-serving-cert\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355648 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355664 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/48839d24-ad26-4c8c-85dd-822259056c44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: \"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355681 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355696 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-config\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355713 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355727 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355744 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-serving-cert\") pod \"console-operator-58897d9998-9v76x\" (UID: 
\"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355758 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355774 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355796 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355810 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-image-import-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355824 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv5mh\" (UniqueName: \"kubernetes.io/projected/48839d24-ad26-4c8c-85dd-822259056c44-kube-api-access-lv5mh\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: \"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355840 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355857 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-service-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355873 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxv8p\" (UniqueName: \"kubernetes.io/projected/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-kube-api-access-xxv8p\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355895 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355912 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355930 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vqqg\" (UniqueName: \"kubernetes.io/projected/e384c720-8dfd-405b-a45d-1182d4fcdd5b-kube-api-access-8vqqg\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355946 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e384c720-8dfd-405b-a45d-1182d4fcdd5b-metrics-tls\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355964 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4gtw\" (UniqueName: \"kubernetes.io/projected/e8a572a4-1181-4e1e-8169-4c481c6b84b7-kube-api-access-q4gtw\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355981 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-machine-approver-tls\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.355997 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xmtg\" (UniqueName: \"kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356012 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356027 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356042 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-client\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356057 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-dir\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356079 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-auth-proxy-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356095 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356113 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356129 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356147 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: 
\"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356162 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit-dir\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.356179 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.364340 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dxblc"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.365098 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.365376 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.365687 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.365788 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.366125 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.366502 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.366879 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.366998 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367137 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367301 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367031 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367423 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367519 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367681 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367707 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367790 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367590 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367932 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367961 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368064 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368097 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368168 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368186 4953 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368066 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368278 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367874 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.368405 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.367927 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.371193 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.373590 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hxx9r"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.393402 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.395082 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.396933 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.404511 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.404582 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.415399 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.416135 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.416622 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.416687 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.416955 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.424665 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.426203 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.427265 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.438350 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5c675"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.439803 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.440095 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.441117 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.441310 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.441669 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.442365 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.442451 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.443458 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-hq74r"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.443991 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.444695 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.445857 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.446381 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.446749 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.447063 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6jhcz"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.447472 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.448256 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2kzjf"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.449250 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.449420 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7gjs6"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.451147 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.452375 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.452907 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.454549 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.455855 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.456300 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457293 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457323 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-serving-cert\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457345 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457361 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457385 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8216420e-4379-4b28-9570-9766833d3a54-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457406 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js2zz\" (UniqueName: \"kubernetes.io/projected/71033171-b236-43d6-974b-25553eb12ffa-kube-api-access-js2zz\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457432 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-image-import-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457451 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b1df59-2adf-4c42-895b-4218ea5d6aee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" 
(UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457490 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll8dq\" (UniqueName: \"kubernetes.io/projected/78f5dc04-9b51-442a-b90f-59aa2145c73b-kube-api-access-ll8dq\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457532 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457557 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457583 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-images\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457621 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv5mh\" (UniqueName: \"kubernetes.io/projected/48839d24-ad26-4c8c-85dd-822259056c44-kube-api-access-lv5mh\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: \"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457637 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48e2299c-5d8e-487c-a356-9c131cd9e38d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457654 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-service-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457673 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxv8p\" (UniqueName: \"kubernetes.io/projected/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-kube-api-access-xxv8p\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457703 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457739 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457762 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vqqg\" (UniqueName: \"kubernetes.io/projected/e384c720-8dfd-405b-a45d-1182d4fcdd5b-kube-api-access-8vqqg\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457802 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e384c720-8dfd-405b-a45d-1182d4fcdd5b-metrics-tls\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457826 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4gtw\" (UniqueName: \"kubernetes.io/projected/e8a572a4-1181-4e1e-8169-4c481c6b84b7-kube-api-access-q4gtw\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457841 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-machine-approver-tls\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457858 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78f5dc04-9b51-442a-b90f-59aa2145c73b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457877 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xmtg\" (UniqueName: \"kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 
02:48:49.457894 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3c1bc5-1fc0-4646-979e-33b5a967b866-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457909 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457926 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457942 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-config\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457956 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-proxy-tls\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457972 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8d43eab6-53c3-4e4c-9795-3757f43aa46e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.457989 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xqld\" (UniqueName: \"kubernetes.io/projected/9c3c1bc5-1fc0-4646-979e-33b5a967b866-kube-api-access-6xqld\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-client\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458022 4953 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48e2299c-5d8e-487c-a356-9c131cd9e38d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458039 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-dir\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458056 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-auth-proxy-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458074 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458092 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458110 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458132 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458152 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit-dir\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458167 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458222 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458239 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458257 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458274 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbvgf\" (UniqueName: \"kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458300 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-encryption-config\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458318 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71033171-b236-43d6-974b-25553eb12ffa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458334 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-trusted-ca\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458351 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfgv8\" (UniqueName: \"kubernetes.io/projected/8216420e-4379-4b28-9570-9766833d3a54-kube-api-access-lfgv8\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: 
\"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458367 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj8m4\" (UniqueName: \"kubernetes.io/projected/d55f397e-a527-4b11-ad27-3f3b986d0985-kube-api-access-mj8m4\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458383 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458406 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b1df59-2adf-4c42-895b-4218ea5d6aee-config\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458409 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458423 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458495 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458535 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-serving-cert\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458567 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc 
kubenswrapper[4953]: I1011 02:48:49.458586 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nthlj\" (UniqueName: \"kubernetes.io/projected/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-kube-api-access-nthlj\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458627 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d55f397e-a527-4b11-ad27-3f3b986d0985-serving-cert\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458646 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458637 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458677 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458725 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458759 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458786 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458816 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458838 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26wv7\" (UniqueName: \"kubernetes.io/projected/eef84f8c-6025-40be-8289-bd7b4c6e5a55-kube-api-access-26wv7\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458860 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-client\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458891 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92wgt\" (UniqueName: \"kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458919 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zflcd\" (UniqueName: \"kubernetes.io/projected/454cbf4b-d2d3-4967-8f93-f47e06b06886-kube-api-access-zflcd\") pod \"migrator-59844c95c7-qpr8t\" (UID: \"454cbf4b-d2d3-4967-8f93-f47e06b06886\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458941 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3c1bc5-1fc0-4646-979e-33b5a967b866-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.458968 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48e2299c-5d8e-487c-a356-9c131cd9e38d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459006 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459024 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459042 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459060 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-encryption-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459081 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8216420e-4379-4b28-9570-9766833d3a54-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459109 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459134 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xlkl\" (UniqueName: \"kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459152 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f7rj\" (UniqueName: \"kubernetes.io/projected/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-kube-api-access-8f7rj\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459170 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-config\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459195 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: 
\"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459212 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459231 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d43eab6-53c3-4e4c-9795-3757f43aa46e-serving-cert\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459246 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmrgg\" (UniqueName: \"kubernetes.io/projected/8d43eab6-53c3-4e4c-9795-3757f43aa46e-kube-api-access-hmrgg\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459266 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459283 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459301 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-policies\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459325 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b1df59-2adf-4c42-895b-4218ea5d6aee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459357 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 
02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459376 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-node-pullsecrets\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459401 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-serving-cert\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459423 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdxsh\" (UniqueName: \"kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459433 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459453 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459482 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-image-import-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459483 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459533 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-config\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459574 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/48839d24-ad26-4c8c-85dd-822259056c44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: 
\"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459600 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrhhq\" (UniqueName: \"kubernetes.io/projected/26d51156-6dc6-4d83-8d51-f2835538a3a9-kube-api-access-zrhhq\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459640 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.459923 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.461152 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.462018 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.462324 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-dir\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.462586 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.463065 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.463315 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: 
\"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.463440 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-auth-proxy-config\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.463771 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.464092 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.464259 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-service-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.464317 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d55f397e-a527-4b11-ad27-3f3b986d0985-serving-cert\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.465315 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-config\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.465529 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.465969 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.466049 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.466149 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d55f397e-a527-4b11-ad27-3f3b986d0985-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.466723 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.466792 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit-dir\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.467190 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468058 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468172 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468190 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468436 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-audit\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468658 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468678 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e8a572a4-1181-4e1e-8169-4c481c6b84b7-audit-policies\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.468581 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469117 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-config\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469127 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-etcd-client\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469186 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469540 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-client\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469587 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5tdt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469646 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.469921 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/eef84f8c-6025-40be-8289-bd7b4c6e5a55-node-pullsecrets\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.470132 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.470251 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/eef84f8c-6025-40be-8289-bd7b4c6e5a55-etcd-serving-ca\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.470377 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.470675 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ttfpq"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.470748 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-serving-cert\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.471397 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-serving-cert\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.471553 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.471648 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.471746 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-trusted-ca\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472023 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc 
kubenswrapper[4953]: I1011 02:48:49.472110 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e384c720-8dfd-405b-a45d-1182d4fcdd5b-metrics-tls\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472153 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-serving-cert\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472236 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472734 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472300 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/eef84f8c-6025-40be-8289-bd7b4c6e5a55-encryption-config\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.472569 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.473928 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.473970 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-b9qp4"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.474047 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.474817 4953 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7rdbv"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.475123 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e8a572a4-1181-4e1e-8169-4c481c6b84b7-encryption-config\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.475221 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.475280 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.475588 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-rzpfj"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.476318 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.476411 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.477060 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/48839d24-ad26-4c8c-85dd-822259056c44-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: \"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.477641 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-machine-approver-tls\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.477846 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.478784 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.479786 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.481446 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.481805 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.482530 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.489041 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.496805 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.500525 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.502261 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.503571 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.506529 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.509639 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fg6dt"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.511981 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.513662 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hxx9r"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.516213 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.516451 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.518618 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.520826 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-67499"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.522196 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dxblc"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.524899 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9v76x"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.527096 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.528712 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.531802 4953 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.533424 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5c675"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.535769 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.536272 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-qp84s"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.537339 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.537645 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vlk7d"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.539622 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.539647 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.539707 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.540851 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-b9qp4"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.542121 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.543282 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.544369 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6jhcz"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.546630 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2kzjf"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.548652 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qp84s"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.550274 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vlk7d"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.551495 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7rdbv"] Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.555747 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560396 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: 
\"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560498 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560560 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560582 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560664 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d43eab6-53c3-4e4c-9795-3757f43aa46e-serving-cert\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560689 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmrgg\" (UniqueName: \"kubernetes.io/projected/8d43eab6-53c3-4e4c-9795-3757f43aa46e-kube-api-access-hmrgg\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560740 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560843 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pptzh\" (UniqueName: \"kubernetes.io/projected/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-kube-api-access-pptzh\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560892 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdxsh\" (UniqueName: \"kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 
02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560916 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.560968 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-key\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561000 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-images\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561083 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll8dq\" (UniqueName: \"kubernetes.io/projected/78f5dc04-9b51-442a-b90f-59aa2145c73b-kube-api-access-ll8dq\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561116 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsvbd\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-kube-api-access-lsvbd\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561143 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-images\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561160 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561209 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/387ac181-16c9-4d29-ad4e-4c4aca7f294e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561231 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-srv-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561286 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/80bba440-e5e2-4c21-befa-59ef185a7295-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561306 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561411 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7w82\" (UniqueName: \"kubernetes.io/projected/4ed84470-d800-4f9a-b387-bf40ddfbc70e-kube-api-access-c7w82\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561521 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78f5dc04-9b51-442a-b90f-59aa2145c73b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561552 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-cabundle\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561571 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzjcp\" (UniqueName: \"kubernetes.io/projected/93a1092d-11b9-4b17-bf72-e7296d56dbf3-kube-api-access-hzjcp\") pod \"downloads-7954f5f757-7rdbv\" (UID: \"93a1092d-11b9-4b17-bf72-e7296d56dbf3\") " pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561632 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 
02:48:49.561659 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-default-certificate\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561696 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-stats-auth\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561731 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-proxy-tls\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561757 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3d36f1a-0429-4531-a820-10cd42bf7414-config\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561784 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561819 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.561927 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-apiservice-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562025 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-images\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562206 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfgv8\" (UniqueName: 
\"kubernetes.io/projected/8216420e-4379-4b28-9570-9766833d3a54-kube-api-access-lfgv8\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562265 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562295 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562322 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a00c4875-8a62-4b96-bc81-d879dff1d91d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562501 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562541 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhrz5\" (UniqueName: \"kubernetes.io/projected/e385b9db-8b22-4cdc-95bc-595bc976ac27-kube-api-access-dhrz5\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562590 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jqww\" (UniqueName: \"kubernetes.io/projected/1248ce56-3a40-4896-8df6-453d36f01910-kube-api-access-8jqww\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562672 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zhsb\" (UniqueName: \"kubernetes.io/projected/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-kube-api-access-8zhsb\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562709 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3c1bc5-1fc0-4646-979e-33b5a967b866-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8216420e-4379-4b28-9570-9766833d3a54-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562779 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-profile-collector-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562816 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1248ce56-3a40-4896-8df6-453d36f01910-proxy-tls\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562856 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b1df59-2adf-4c42-895b-4218ea5d6aee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562886 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e385b9db-8b22-4cdc-95bc-595bc976ac27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562950 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrhhq\" (UniqueName: \"kubernetes.io/projected/26d51156-6dc6-4d83-8d51-f2835538a3a9-kube-api-access-zrhhq\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.562979 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8216420e-4379-4b28-9570-9766833d3a54-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: 
\"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js2zz\" (UniqueName: \"kubernetes.io/projected/71033171-b236-43d6-974b-25553eb12ffa-kube-api-access-js2zz\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563084 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563129 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b1df59-2adf-4c42-895b-4218ea5d6aee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563172 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563201 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48e2299c-5d8e-487c-a356-9c131cd9e38d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563227 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4ed84470-d800-4f9a-b387-bf40ddfbc70e-tmpfs\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563262 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563541 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3c1bc5-1fc0-4646-979e-33b5a967b866-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563573 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-metrics-certs\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563646 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-config\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563728 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e385b9db-8b22-4cdc-95bc-595bc976ac27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563782 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9xmn\" (UniqueName: \"kubernetes.io/projected/d61d0811-775b-42d0-b605-f0d6a8b2dbce-kube-api-access-q9xmn\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48e2299c-5d8e-487c-a356-9c131cd9e38d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563856 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8d43eab6-53c3-4e4c-9795-3757f43aa46e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.563884 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xqld\" (UniqueName: \"kubernetes.io/projected/9c3c1bc5-1fc0-4646-979e-33b5a967b866-kube-api-access-6xqld\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564026 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wsrs\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-kube-api-access-9wsrs\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: 
\"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564060 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564089 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s74dz\" (UniqueName: \"kubernetes.io/projected/090fd335-08ca-49b6-beb6-80dc582340a1-kube-api-access-s74dz\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564119 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j42t6\" (UniqueName: \"kubernetes.io/projected/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-kube-api-access-j42t6\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564208 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48e2299c-5d8e-487c-a356-9c131cd9e38d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564259 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8216420e-4379-4b28-9570-9766833d3a54-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564416 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71033171-b236-43d6-974b-25553eb12ffa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564491 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78f5dc04-9b51-442a-b90f-59aa2145c73b-config\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564508 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 
02:48:49.564622 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twlnh\" (UniqueName: \"kubernetes.io/projected/80bba440-e5e2-4c21-befa-59ef185a7295-kube-api-access-twlnh\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564729 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3d36f1a-0429-4531-a820-10cd42bf7414-serving-cert\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564813 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxctx\" (UniqueName: \"kubernetes.io/projected/a00c4875-8a62-4b96-bc81-d879dff1d91d-kube-api-access-nxctx\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564889 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5g9w\" (UniqueName: \"kubernetes.io/projected/19f90a61-22fa-4d99-93b6-72520fed2d71-kube-api-access-n5g9w\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565000 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b1df59-2adf-4c42-895b-4218ea5d6aee-config\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564813 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8d43eab6-53c3-4e4c-9795-3757f43aa46e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.564671 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/78f5dc04-9b51-442a-b90f-59aa2145c73b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565092 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565198 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-srv-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565245 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565271 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nthlj\" (UniqueName: \"kubernetes.io/projected/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-kube-api-access-nthlj\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565296 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-webhook-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565322 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s4hg\" (UniqueName: \"kubernetes.io/projected/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-kube-api-access-7s4hg\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565348 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxlq5\" (UniqueName: \"kubernetes.io/projected/d3d36f1a-0429-4531-a820-10cd42bf7414-kube-api-access-vxlq5\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565413 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565481 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565522 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090fd335-08ca-49b6-beb6-80dc582340a1-service-ca-bundle\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565556 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48e2299c-5d8e-487c-a356-9c131cd9e38d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565576 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zflcd\" (UniqueName: \"kubernetes.io/projected/454cbf4b-d2d3-4967-8f93-f47e06b06886-kube-api-access-zflcd\") pod \"migrator-59844c95c7-qpr8t\" (UID: \"454cbf4b-d2d3-4967-8f93-f47e06b06886\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.565987 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/387ac181-16c9-4d29-ad4e-4c4aca7f294e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.566391 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b1df59-2adf-4c42-895b-4218ea5d6aee-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.566591 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-proxy-tls\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.566961 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b1df59-2adf-4c42-895b-4218ea5d6aee-config\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.566997 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.567479 4953 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8216420e-4379-4b28-9570-9766833d3a54-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.575183 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48e2299c-5d8e-487c-a356-9c131cd9e38d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.576662 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.578453 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.598195 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.615961 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.637085 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.647347 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8d43eab6-53c3-4e4c-9795-3757f43aa46e-serving-cert\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.658967 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667235 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3d36f1a-0429-4531-a820-10cd42bf7414-config\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667298 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667319 4953 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667343 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-apiservice-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667393 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667411 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667450 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a00c4875-8a62-4b96-bc81-d879dff1d91d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667476 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667503 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhrz5\" (UniqueName: \"kubernetes.io/projected/e385b9db-8b22-4cdc-95bc-595bc976ac27-kube-api-access-dhrz5\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667524 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zhsb\" (UniqueName: \"kubernetes.io/projected/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-kube-api-access-8zhsb\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667542 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jqww\" (UniqueName: 
\"kubernetes.io/projected/1248ce56-3a40-4896-8df6-453d36f01910-kube-api-access-8jqww\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667573 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-profile-collector-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667601 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1248ce56-3a40-4896-8df6-453d36f01910-proxy-tls\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667645 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e385b9db-8b22-4cdc-95bc-595bc976ac27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667681 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667713 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667730 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4ed84470-d800-4f9a-b387-bf40ddfbc70e-tmpfs\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667760 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667800 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-metrics-certs\") pod 
\"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667824 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wsrs\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-kube-api-access-9wsrs\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667844 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e385b9db-8b22-4cdc-95bc-595bc976ac27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667865 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9xmn\" (UniqueName: \"kubernetes.io/projected/d61d0811-775b-42d0-b605-f0d6a8b2dbce-kube-api-access-q9xmn\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667884 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667906 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s74dz\" (UniqueName: \"kubernetes.io/projected/090fd335-08ca-49b6-beb6-80dc582340a1-kube-api-access-s74dz\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667927 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j42t6\" (UniqueName: \"kubernetes.io/projected/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-kube-api-access-j42t6\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667955 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twlnh\" (UniqueName: \"kubernetes.io/projected/80bba440-e5e2-4c21-befa-59ef185a7295-kube-api-access-twlnh\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.667973 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3d36f1a-0429-4531-a820-10cd42bf7414-serving-cert\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668001 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxctx\" (UniqueName: \"kubernetes.io/projected/a00c4875-8a62-4b96-bc81-d879dff1d91d-kube-api-access-nxctx\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668022 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5g9w\" (UniqueName: \"kubernetes.io/projected/19f90a61-22fa-4d99-93b6-72520fed2d71-kube-api-access-n5g9w\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668040 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-srv-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668063 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-webhook-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668081 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s4hg\" (UniqueName: \"kubernetes.io/projected/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-kube-api-access-7s4hg\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668100 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxlq5\" (UniqueName: \"kubernetes.io/projected/d3d36f1a-0429-4531-a820-10cd42bf7414-kube-api-access-vxlq5\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668126 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668156 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090fd335-08ca-49b6-beb6-80dc582340a1-service-ca-bundle\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668182 4953 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/387ac181-16c9-4d29-ad4e-4c4aca7f294e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668225 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668244 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668263 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668286 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pptzh\" (UniqueName: \"kubernetes.io/projected/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-kube-api-access-pptzh\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668324 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668347 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-key\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668368 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-images\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668394 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsvbd\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-kube-api-access-lsvbd\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: 
\"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668422 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4ed84470-d800-4f9a-b387-bf40ddfbc70e-tmpfs\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668441 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/387ac181-16c9-4d29-ad4e-4c4aca7f294e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668524 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-srv-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668549 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/80bba440-e5e2-4c21-befa-59ef185a7295-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668593 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668668 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7w82\" (UniqueName: \"kubernetes.io/projected/4ed84470-d800-4f9a-b387-bf40ddfbc70e-kube-api-access-c7w82\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668712 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-cabundle\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668760 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzjcp\" (UniqueName: \"kubernetes.io/projected/93a1092d-11b9-4b17-bf72-e7296d56dbf3-kube-api-access-hzjcp\") pod \"downloads-7954f5f757-7rdbv\" (UID: \"93a1092d-11b9-4b17-bf72-e7296d56dbf3\") " pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668794 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668820 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-default-certificate\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.668846 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-stats-auth\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.669324 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-auth-proxy-config\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.672325 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.672418 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-profile-collector-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.676754 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.696000 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.716638 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.737068 4953 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.747487 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3c1bc5-1fc0-4646-979e-33b5a967b866-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.756387 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.764860 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3c1bc5-1fc0-4646-979e-33b5a967b866-config\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.777118 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.796676 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.816230 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.830211 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.836348 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.863051 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.873134 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.876325 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.888350 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71033171-b236-43d6-974b-25553eb12ffa-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 
02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.896757 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.916737 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.937257 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.941349 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-apiservice-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.941641 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ed84470-d800-4f9a-b387-bf40ddfbc70e-webhook-cert\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.956435 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.961133 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-srv-cert\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.976344 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 11 02:48:49 crc kubenswrapper[4953]: I1011 02:48:49.996091 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.001308 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/80bba440-e5e2-4c21-befa-59ef185a7295-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.016202 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.036187 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.056568 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 11 02:48:50 crc 
kubenswrapper[4953]: I1011 02:48:50.075647 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.079347 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e385b9db-8b22-4cdc-95bc-595bc976ac27-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.096956 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.102540 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e385b9db-8b22-4cdc-95bc-595bc976ac27-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.140020 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.157393 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.161242 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1248ce56-3a40-4896-8df6-453d36f01910-proxy-tls\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.177008 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.179304 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1248ce56-3a40-4896-8df6-453d36f01910-images\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.197094 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.201345 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a00c4875-8a62-4b96-bc81-d879dff1d91d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.216970 4953 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.236745 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.257141 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.277126 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.301576 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.310426 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/387ac181-16c9-4d29-ad4e-4c4aca7f294e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.316692 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.322798 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/387ac181-16c9-4d29-ad4e-4c4aca7f294e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.337290 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.356955 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.363707 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-default-certificate\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.377022 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.381859 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-metrics-certs\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.397016 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.403133 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/090fd335-08ca-49b6-beb6-80dc582340a1-stats-auth\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.415572 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.419529 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/090fd335-08ca-49b6-beb6-80dc582340a1-service-ca-bundle\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.436896 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.455311 4953 request.go:700] Waited for 1.010780858s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.457339 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.476999 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.489518 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-srv-cert\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.497354 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.516351 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.536590 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.543893 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3d36f1a-0429-4531-a820-10cd42bf7414-serving-cert\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.557322 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.559965 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3d36f1a-0429-4531-a820-10cd42bf7414-config\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: 
\"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.576796 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.597195 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.617424 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.636662 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.644042 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-key\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.658708 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.660679 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-signing-cabundle\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.667892 4953 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668012 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs podName:d61d0811-775b-42d0-b605-f0d6a8b2dbce nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.167970918 +0000 UTC m=+142.101058562 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs") pod "machine-config-server-rzpfj" (UID: "d61d0811-775b-42d0-b605-f0d6a8b2dbce") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668276 4953 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668368 4953 secret.go:188] Couldn't get secret openshift-etcd-operator/etcd-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668401 4953 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668409 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca podName:66c99208-eaa2-43e2-a1ef-19a8fd996e1f nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.168379198 +0000 UTC m=+142.101466872 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-service-ca" (UniqueName: "kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca") pod "etcd-operator-b45778765-2kzjf" (UID: "66c99208-eaa2-43e2-a1ef-19a8fd996e1f") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668439 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert podName:66c99208-eaa2-43e2-a1ef-19a8fd996e1f nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.168425609 +0000 UTC m=+142.101513293 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert") pod "etcd-operator-b45778765-2kzjf" (UID: "66c99208-eaa2-43e2-a1ef-19a8fd996e1f") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668469 4953 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-operator-config: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668495 4953 secret.go:188] Couldn't get secret openshift-etcd-operator/etcd-client: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668518 4953 secret.go:188] Couldn't get secret openshift-ingress-operator/metrics-tls: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668574 4953 configmap.go:193] Couldn't get configMap openshift-kube-controller-manager-operator/kube-controller-manager-operator-config: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668605 4953 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668661 4953 configmap.go:193] Couldn't get configMap openshift-etcd-operator/etcd-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.668725 4953 configmap.go:193] Couldn't get configMap openshift-ingress-operator/trusted-ca: failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.669806 4953 secret.go:188] Couldn't get secret openshift-kube-controller-manager-operator/kube-controller-manager-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670660 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token podName:d61d0811-775b-42d0-b605-f0d6a8b2dbce nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.1684521 +0000 UTC m=+142.101539774 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token") pod "machine-config-server-rzpfj" (UID: "d61d0811-775b-42d0-b605-f0d6a8b2dbce") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670730 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config podName:66c99208-eaa2-43e2-a1ef-19a8fd996e1f nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170709348 +0000 UTC m=+142.103797022 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config") pod "etcd-operator-b45778765-2kzjf" (UID: "66c99208-eaa2-43e2-a1ef-19a8fd996e1f") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670768 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client podName:66c99208-eaa2-43e2-a1ef-19a8fd996e1f nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170755129 +0000 UTC m=+142.103842803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client") pod "etcd-operator-b45778765-2kzjf" (UID: "66c99208-eaa2-43e2-a1ef-19a8fd996e1f") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670799 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls podName:bf23c33a-57d1-4671-a0d2-e55556106948 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.17078784 +0000 UTC m=+142.103875524 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls") pod "ingress-operator-5b745b69d9-5t2t2" (UID: "bf23c33a-57d1-4671-a0d2-e55556106948") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670828 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config podName:d30ad7f6-c8b7-4466-bbf9-00a1ab119270 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170817651 +0000 UTC m=+142.103905325 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config") pod "kube-controller-manager-operator-78b949d7b-tr9hs" (UID: "d30ad7f6-c8b7-4466-bbf9-00a1ab119270") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670856 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert podName:19f90a61-22fa-4d99-93b6-72520fed2d71 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170845771 +0000 UTC m=+142.103933455 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert") pod "ingress-canary-b9qp4" (UID: "19f90a61-22fa-4d99-93b6-72520fed2d71") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670889 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca podName:66c99208-eaa2-43e2-a1ef-19a8fd996e1f nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170879352 +0000 UTC m=+142.103967036 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etcd-ca" (UniqueName: "kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca") pod "etcd-operator-b45778765-2kzjf" (UID: "66c99208-eaa2-43e2-a1ef-19a8fd996e1f") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670921 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca podName:bf23c33a-57d1-4671-a0d2-e55556106948 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170908783 +0000 UTC m=+142.103996457 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca" (UniqueName: "kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca") pod "ingress-operator-5b745b69d9-5t2t2" (UID: "bf23c33a-57d1-4671-a0d2-e55556106948") : failed to sync configmap cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: E1011 02:48:50.670950 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert podName:d30ad7f6-c8b7-4466-bbf9-00a1ab119270 nodeName:}" failed. No retries permitted until 2025-10-11 02:48:51.170939254 +0000 UTC m=+142.104026928 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert") pod "kube-controller-manager-operator-78b949d7b-tr9hs" (UID: "d30ad7f6-c8b7-4466-bbf9-00a1ab119270") : failed to sync secret cache: timed out waiting for the condition Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.677531 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.697389 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.715840 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.736808 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.756064 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.776755 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.797461 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.816341 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.837060 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.858024 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.876485 4953 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.897845 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.916729 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.943838 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.957888 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.976663 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 11 02:48:50 crc kubenswrapper[4953]: I1011 02:48:50.996423 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.016681 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.062578 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4gtw\" (UniqueName: \"kubernetes.io/projected/e8a572a4-1181-4e1e-8169-4c481c6b84b7-kube-api-access-q4gtw\") pod \"apiserver-7bbb656c7d-jh2h9\" (UID: \"e8a572a4-1181-4e1e-8169-4c481c6b84b7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.074268 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26wv7\" (UniqueName: \"kubernetes.io/projected/eef84f8c-6025-40be-8289-bd7b4c6e5a55-kube-api-access-26wv7\") pod \"apiserver-76f77b778f-t5tdt\" (UID: \"eef84f8c-6025-40be-8289-bd7b4c6e5a55\") " pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.093393 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxv8p\" (UniqueName: \"kubernetes.io/projected/28f49c49-96ce-44b8-a402-b5a3c84ee5b4-kube-api-access-xxv8p\") pod \"machine-approver-56656f9798-d8xwl\" (UID: \"28f49c49-96ce-44b8-a402-b5a3c84ee5b4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.116197 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv5mh\" (UniqueName: \"kubernetes.io/projected/48839d24-ad26-4c8c-85dd-822259056c44-kube-api-access-lv5mh\") pod \"cluster-samples-operator-665b6dd947-lnwjz\" (UID: \"48839d24-ad26-4c8c-85dd-822259056c44\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.135327 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f7rj\" (UniqueName: \"kubernetes.io/projected/acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a-kube-api-access-8f7rj\") pod \"console-operator-58897d9998-9v76x\" (UID: \"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a\") " 
pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.135924 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.164400 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xlkl\" (UniqueName: \"kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl\") pod \"oauth-openshift-558db77b4-s5p48\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.172972 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xmtg\" (UniqueName: \"kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg\") pod \"controller-manager-879f6c89f-6tcb8\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.198201 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.198372 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.198751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.198848 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.198976 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199068 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199102 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199190 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199275 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199311 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199398 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.199547 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.200701 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-config\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.200773 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-service-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.201155 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-ca\") pod \"etcd-operator-b45778765-2kzjf\" (UID: 
\"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.201471 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-config\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.202253 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf23c33a-57d1-4671-a0d2-e55556106948-trusted-ca\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.203207 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.203289 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-etcd-client\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.203557 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vqqg\" (UniqueName: \"kubernetes.io/projected/e384c720-8dfd-405b-a45d-1182d4fcdd5b-kube-api-access-8vqqg\") pod \"dns-operator-744455d44c-7gjs6\" (UID: \"e384c720-8dfd-405b-a45d-1182d4fcdd5b\") " pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.204261 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-serving-cert\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.204271 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bf23c33a-57d1-4671-a0d2-e55556106948-metrics-tls\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.215736 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj8m4\" (UniqueName: \"kubernetes.io/projected/d55f397e-a527-4b11-ad27-3f3b986d0985-kube-api-access-mj8m4\") pod \"authentication-operator-69f744f599-ttfpq\" (UID: \"d55f397e-a527-4b11-ad27-3f3b986d0985\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.229145 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.232843 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92wgt\" (UniqueName: \"kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt\") pod \"console-f9d7485db-cv9v6\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.249212 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.257566 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.258924 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbvgf\" (UniqueName: \"kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf\") pod \"route-controller-manager-6576b87f9c-jpcz9\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.276801 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.284368 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/19f90a61-22fa-4d99-93b6-72520fed2d71-cert\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.297886 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.311706 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.316103 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.316415 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.323473 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.330175 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.337745 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.356418 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.366598 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-certs\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:51 crc kubenswrapper[4953]: W1011 02:48:51.371815 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28f49c49_96ce_44b8_a402_b5a3c84ee5b4.slice/crio-a8eb485b682618211d06a57b15cdfa7b4eb2faadf1eb5de918abc45ff6e2e7f9 WatchSource:0}: Error finding container a8eb485b682618211d06a57b15cdfa7b4eb2faadf1eb5de918abc45ff6e2e7f9: Status 404 returned error can't find the container with id a8eb485b682618211d06a57b15cdfa7b4eb2faadf1eb5de918abc45ff6e2e7f9 Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.377126 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-t5tdt"] Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.377147 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.391766 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:48:51 crc kubenswrapper[4953]: W1011 02:48:51.392044 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeef84f8c_6025_40be_8289_bd7b4c6e5a55.slice/crio-b26e7aa5b1359db222bd82aeed1c6b2b32f8c5b1c1f49e7a8f1c2062b1915109 WatchSource:0}: Error finding container b26e7aa5b1359db222bd82aeed1c6b2b32f8c5b1c1f49e7a8f1c2062b1915109: Status 404 returned error can't find the container with id b26e7aa5b1359db222bd82aeed1c6b2b32f8c5b1c1f49e7a8f1c2062b1915109 Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.397832 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.408588 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/d61d0811-775b-42d0-b605-f0d6a8b2dbce-node-bootstrap-token\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.413117 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.436638 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.452093 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.452359 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7gjs6"] Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.457517 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 11 02:48:51 crc kubenswrapper[4953]: W1011 02:48:51.464718 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode384c720_8dfd_405b_a45d_1182d4fcdd5b.slice/crio-53b1d7d6b18d3467b7cabb87b4f41cbd2c4381c0b741068ce7775f0f95ae8c85 WatchSource:0}: Error finding container 53b1d7d6b18d3467b7cabb87b4f41cbd2c4381c0b741068ce7775f0f95ae8c85: Status 404 returned error can't find the container with id 53b1d7d6b18d3467b7cabb87b4f41cbd2c4381c0b741068ce7775f0f95ae8c85 Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.474199 4953 request.go:700] Waited for 1.936526186s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.476052 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.496056 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.497152 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.515963 4953 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.522987 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-9v76x"] Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.549131 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.583029 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmrgg\" (UniqueName: \"kubernetes.io/projected/8d43eab6-53c3-4e4c-9795-3757f43aa46e-kube-api-access-hmrgg\") pod \"openshift-config-operator-7777fb866f-dxblc\" (UID: \"8d43eab6-53c3-4e4c-9795-3757f43aa46e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.599311 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdxsh\" (UniqueName: \"kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh\") pod \"collect-profiles-29335845-k5sct\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:51 crc kubenswrapper[4953]: W1011 02:48:51.601820 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacf3d6fa_3a9a_4c35_bdbf_f15d10ed3a0a.slice/crio-e7c9e8247b7f3a288a2bfbb2abd379ed7693335005331e6b06121b2ccb3b7bab WatchSource:0}: Error finding container e7c9e8247b7f3a288a2bfbb2abd379ed7693335005331e6b06121b2ccb3b7bab: Status 404 returned error can't find the container with id e7c9e8247b7f3a288a2bfbb2abd379ed7693335005331e6b06121b2ccb3b7bab Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.612627 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll8dq\" (UniqueName: \"kubernetes.io/projected/78f5dc04-9b51-442a-b90f-59aa2145c73b-kube-api-access-ll8dq\") pod \"machine-api-operator-5694c8668f-fg6dt\" (UID: \"78f5dc04-9b51-442a-b90f-59aa2145c73b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.634129 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfgv8\" (UniqueName: \"kubernetes.io/projected/8216420e-4379-4b28-9570-9766833d3a54-kube-api-access-lfgv8\") pod \"openshift-controller-manager-operator-756b6f6bc6-kb2tv\" (UID: \"8216420e-4379-4b28-9570-9766833d3a54\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.637261 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.651314 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.655229 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js2zz\" (UniqueName: \"kubernetes.io/projected/71033171-b236-43d6-974b-25553eb12ffa-kube-api-access-js2zz\") pod \"multus-admission-controller-857f4d67dd-hxx9r\" (UID: \"71033171-b236-43d6-974b-25553eb12ffa\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.674441 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrhhq\" (UniqueName: \"kubernetes.io/projected/26d51156-6dc6-4d83-8d51-f2835538a3a9-kube-api-access-zrhhq\") pod \"marketplace-operator-79b997595-8zbjh\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.688750 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" event={"ID":"eef84f8c-6025-40be-8289-bd7b4c6e5a55","Type":"ContainerStarted","Data":"b26e7aa5b1359db222bd82aeed1c6b2b32f8c5b1c1f49e7a8f1c2062b1915109"} Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.691076 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" event={"ID":"28f49c49-96ce-44b8-a402-b5a3c84ee5b4","Type":"ContainerStarted","Data":"a8eb485b682618211d06a57b15cdfa7b4eb2faadf1eb5de918abc45ff6e2e7f9"} Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.693280 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9v76x" event={"ID":"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a","Type":"ContainerStarted","Data":"e7c9e8247b7f3a288a2bfbb2abd379ed7693335005331e6b06121b2ccb3b7bab"} Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.693297 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e5b1df59-2adf-4c42-895b-4218ea5d6aee-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-j7rs7\" (UID: \"e5b1df59-2adf-4c42-895b-4218ea5d6aee\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.693508 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.694998 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" event={"ID":"e384c720-8dfd-405b-a45d-1182d4fcdd5b","Type":"ContainerStarted","Data":"53b1d7d6b18d3467b7cabb87b4f41cbd2c4381c0b741068ce7775f0f95ae8c85"} Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.701681 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.711079 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xqld\" (UniqueName: \"kubernetes.io/projected/9c3c1bc5-1fc0-4646-979e-33b5a967b866-kube-api-access-6xqld\") pod \"openshift-apiserver-operator-796bbdcf4f-t4qhc\" (UID: \"9c3c1bc5-1fc0-4646-979e-33b5a967b866\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.712988 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.720956 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.736305 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nthlj\" (UniqueName: \"kubernetes.io/projected/4d13fcb2-aa07-45c5-ae90-bac982fa0bd1-kube-api-access-nthlj\") pod \"machine-config-controller-84d6567774-67499\" (UID: \"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.764853 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48e2299c-5d8e-487c-a356-9c131cd9e38d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zx8gt\" (UID: \"48e2299c-5d8e-487c-a356-9c131cd9e38d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.766622 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.775436 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zflcd\" (UniqueName: \"kubernetes.io/projected/454cbf4b-d2d3-4967-8f93-f47e06b06886-kube-api-access-zflcd\") pod \"migrator-59844c95c7-qpr8t\" (UID: \"454cbf4b-d2d3-4967-8f93-f47e06b06886\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.801135 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhrz5\" (UniqueName: \"kubernetes.io/projected/e385b9db-8b22-4cdc-95bc-595bc976ac27-kube-api-access-dhrz5\") pod \"kube-storage-version-migrator-operator-b67b599dd-xcnpf\" (UID: \"e385b9db-8b22-4cdc-95bc-595bc976ac27\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.832888 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zhsb\" (UniqueName: \"kubernetes.io/projected/66c99208-eaa2-43e2-a1ef-19a8fd996e1f-kube-api-access-8zhsb\") pod \"etcd-operator-b45778765-2kzjf\" (UID: \"66c99208-eaa2-43e2-a1ef-19a8fd996e1f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.833063 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9"] Oct 11 
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.833097 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"]
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.843705 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jqww\" (UniqueName: \"kubernetes.io/projected/1248ce56-3a40-4896-8df6-453d36f01910-kube-api-access-8jqww\") pod \"machine-config-operator-74547568cd-5c675\" (UID: \"1248ce56-3a40-4896-8df6-453d36f01910\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.856161 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d30ad7f6-c8b7-4466-bbf9-00a1ab119270-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-tr9hs\" (UID: \"d30ad7f6-c8b7-4466-bbf9-00a1ab119270\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.859745 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"]
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.869784 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.898745 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wsrs\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-kube-api-access-9wsrs\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.922445 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9xmn\" (UniqueName: \"kubernetes.io/projected/d61d0811-775b-42d0-b605-f0d6a8b2dbce-kube-api-access-q9xmn\") pod \"machine-config-server-rzpfj\" (UID: \"d61d0811-775b-42d0-b605-f0d6a8b2dbce\") " pod="openshift-machine-config-operator/machine-config-server-rzpfj"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.939525 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz"]
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.940101 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twlnh\" (UniqueName: \"kubernetes.io/projected/80bba440-e5e2-4c21-befa-59ef185a7295-kube-api-access-twlnh\") pod \"control-plane-machine-set-operator-78cbb6b69f-qcfl9\" (UID: \"80bba440-e5e2-4c21-befa-59ef185a7295\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.946945 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.954782 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf23c33a-57d1-4671-a0d2-e55556106948-bound-sa-token\") pod \"ingress-operator-5b745b69d9-5t2t2\" (UID: \"bf23c33a-57d1-4671-a0d2-e55556106948\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.957266 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.963954 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.975125 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j42t6\" (UniqueName: \"kubernetes.io/projected/5d1d1cd4-d0eb-4067-ae21-c0e547df65d8-kube-api-access-j42t6\") pod \"service-ca-9c57cc56f-6jhcz\" (UID: \"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.987298 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499"
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.989849 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fg6dt"]
Oct 11 02:48:51 crc kubenswrapper[4953]: I1011 02:48:51.994295 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5g9w\" (UniqueName: \"kubernetes.io/projected/19f90a61-22fa-4d99-93b6-72520fed2d71-kube-api-access-n5g9w\") pod \"ingress-canary-b9qp4\" (UID: \"19f90a61-22fa-4d99-93b6-72520fed2d71\") " pod="openshift-ingress-canary/ingress-canary-b9qp4"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.000198 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"]
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.003010 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-ttfpq"]
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.006497 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.023168 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s74dz\" (UniqueName: \"kubernetes.io/projected/090fd335-08ca-49b6-beb6-80dc582340a1-kube-api-access-s74dz\") pod \"router-default-5444994796-hq74r\" (UID: \"090fd335-08ca-49b6-beb6-80dc582340a1\") " pod="openshift-ingress/router-default-5444994796-hq74r"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.042089 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9"
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.050108 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.050363 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxctx\" (UniqueName: \"kubernetes.io/projected/a00c4875-8a62-4b96-bc81-d879dff1d91d-kube-api-access-nxctx\") pod \"package-server-manager-789f6589d5-b92s9\" (UID: \"a00c4875-8a62-4b96-bc81-d879dff1d91d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.050412 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.062088 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.063141 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxlq5\" (UniqueName: \"kubernetes.io/projected/d3d36f1a-0429-4531-a820-10cd42bf7414-kube-api-access-vxlq5\") pod \"service-ca-operator-777779d784-dbxsb\" (UID: \"d3d36f1a-0429-4531-a820-10cd42bf7414\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.071532 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.077829 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s4hg\" (UniqueName: \"kubernetes.io/projected/84eff6bb-f4c1-41f5-97d5-0b28a8681c1b-kube-api-access-7s4hg\") pod \"catalog-operator-68c6474976-k7h6x\" (UID: \"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.083015 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hxx9r"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.086224 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.093888 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.097536 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.103392 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pptzh\" (UniqueName: \"kubernetes.io/projected/d811aeaa-e286-44d5-a570-8b0f0b6dfc0c-kube-api-access-pptzh\") pod \"olm-operator-6b444d44fb-kmshj\" (UID: \"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.105785 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.112423 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.118022 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsvbd\" (UniqueName: \"kubernetes.io/projected/387ac181-16c9-4d29-ad4e-4c4aca7f294e-kube-api-access-lsvbd\") pod \"cluster-image-registry-operator-dc59b4c8b-2xxts\" (UID: \"387ac181-16c9-4d29-ad4e-4c4aca7f294e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.118910 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.119652 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.119782 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dxblc"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.126787 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" Oct 11 02:48:52 crc kubenswrapper[4953]: W1011 02:48:52.128714 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda29cc960_b47b_494a_81bd_617e97aed612.slice/crio-d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453 WatchSource:0}: Error finding container d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453: Status 404 returned error can't find the container with id d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453 Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.133018 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.141570 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7w82\" (UniqueName: \"kubernetes.io/projected/4ed84470-d800-4f9a-b387-bf40ddfbc70e-kube-api-access-c7w82\") pod \"packageserver-d55dfcdfc-4wgln\" (UID: \"4ed84470-d800-4f9a-b387-bf40ddfbc70e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.151538 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzjcp\" (UniqueName: \"kubernetes.io/projected/93a1092d-11b9-4b17-bf72-e7296d56dbf3-kube-api-access-hzjcp\") pod \"downloads-7954f5f757-7rdbv\" (UID: \"93a1092d-11b9-4b17-bf72-e7296d56dbf3\") " pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.151849 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-b9qp4" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.159982 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rzpfj" Oct 11 02:48:52 crc kubenswrapper[4953]: W1011 02:48:52.188012 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26d51156_6dc6_4d83_8d51_f2835538a3a9.slice/crio-91aa1cec128bd431ff059912ff18f8e580450c89ba31477513ee2617ce6b0299 WatchSource:0}: Error finding container 91aa1cec128bd431ff059912ff18f8e580450c89ba31477513ee2617ce6b0299: Status 404 returned error can't find the container with id 91aa1cec128bd431ff059912ff18f8e580450c89ba31477513ee2617ce6b0299 Oct 11 02:48:52 crc kubenswrapper[4953]: W1011 02:48:52.228576 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71033171_b236_43d6_974b_25553eb12ffa.slice/crio-57a11360368639f839312e66ef8f5143604c0f5b55fd361fc515394ab6ad9b61 WatchSource:0}: Error finding container 57a11360368639f839312e66ef8f5143604c0f5b55fd361fc515394ab6ad9b61: Status 404 returned error can't find the container with id 57a11360368639f839312e66ef8f5143604c0f5b55fd361fc515394ab6ad9b61 Oct 11 02:48:52 crc kubenswrapper[4953]: W1011 02:48:52.229435 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d43eab6_53c3_4e4c_9795_3757f43aa46e.slice/crio-c6dea521c25120a269c208eac0a650abae12f2a9df918069b60bd0b5a0adecd7 WatchSource:0}: Error finding container c6dea521c25120a269c208eac0a650abae12f2a9df918069b60bd0b5a0adecd7: Status 404 returned error can't find the container with id c6dea521c25120a269c208eac0a650abae12f2a9df918069b60bd0b5a0adecd7 Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231531 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231577 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231669 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231696 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231713 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcwbx\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231759 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231784 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.231818 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.232221 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:52.732208052 +0000 UTC m=+143.665295696 (durationBeforeRetry 500ms). 
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.254919 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt"]
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.325367 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334217 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334444 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334829 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334861 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-csi-data-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334897 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s8k5\" (UniqueName: \"kubernetes.io/projected/cc2a69c8-612d-490d-82db-f7d78cdfa38c-kube-api-access-4s8k5\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334917 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-mountpoint-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.334972 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f710d8e7-d914-47e6-91e4-abf802875b7c-metrics-tls\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335429 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335458 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-plugins-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335496 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335548 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-socket-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335571 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f710d8e7-d914-47e6-91e4-abf802875b7c-config-volume\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335709 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335750 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcwbx\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335895 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-registration-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.335936 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jht47\" (UniqueName: \"kubernetes.io/projected/f710d8e7-d914-47e6-91e4-abf802875b7c-kube-api-access-jht47\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s"
Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.336820 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:52.836797974 +0000 UTC m=+143.769885618 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.340509 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.342832 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.344730 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj"
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.352018 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.352544 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: W1011 02:48:52.362461 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48e2299c_5d8e_487c_a356_9c131cd9e38d.slice/crio-4160eb78aae637da17c6c73168596139b25776a474491668738231d66fa69504 WatchSource:0}: Error finding container 4160eb78aae637da17c6c73168596139b25776a474491668738231d66fa69504: Status 404 returned error can't find the container with id 4160eb78aae637da17c6c73168596139b25776a474491668738231d66fa69504 Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.363237 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.364092 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.377314 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.386834 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.392659 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcwbx\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438802 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438858 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-plugins-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438886 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-socket-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438903 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f710d8e7-d914-47e6-91e4-abf802875b7c-config-volume\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438956 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-registration-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.438982 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jht47\" (UniqueName: \"kubernetes.io/projected/f710d8e7-d914-47e6-91e4-abf802875b7c-kube-api-access-jht47\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.439084 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-csi-data-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.439120 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s8k5\" (UniqueName: \"kubernetes.io/projected/cc2a69c8-612d-490d-82db-f7d78cdfa38c-kube-api-access-4s8k5\") pod 
\"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.439144 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-mountpoint-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.439168 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f710d8e7-d914-47e6-91e4-abf802875b7c-metrics-tls\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.442987 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:52.942958477 +0000 UTC m=+143.876046121 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.443669 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-csi-data-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.443737 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-registration-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.444009 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-mountpoint-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.444448 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f710d8e7-d914-47e6-91e4-abf802875b7c-config-volume\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.444707 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-plugins-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " 
pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.444764 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.458931 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/cc2a69c8-612d-490d-82db-f7d78cdfa38c-socket-dir\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.461191 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f710d8e7-d914-47e6-91e4-abf802875b7c-metrics-tls\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.487471 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jht47\" (UniqueName: \"kubernetes.io/projected/f710d8e7-d914-47e6-91e4-abf802875b7c-kube-api-access-jht47\") pod \"dns-default-qp84s\" (UID: \"f710d8e7-d914-47e6-91e4-abf802875b7c\") " pod="openshift-dns/dns-default-qp84s" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.495154 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s8k5\" (UniqueName: \"kubernetes.io/projected/cc2a69c8-612d-490d-82db-f7d78cdfa38c-kube-api-access-4s8k5\") pod \"csi-hostpathplugin-vlk7d\" (UID: \"cc2a69c8-612d-490d-82db-f7d78cdfa38c\") " pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.543699 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.545205 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.545417 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.045386183 +0000 UTC m=+143.978473827 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.545707 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.546090 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.046083671 +0000 UTC m=+143.979171305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.650113 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.650787 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.150508859 +0000 UTC m=+144.083596503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.737698 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" event={"ID":"e384c720-8dfd-405b-a45d-1182d4fcdd5b","Type":"ContainerStarted","Data":"b6b478bb8ddc3285e98f9b21f4c6f94c115a94781d7e2dc2c24921614a704a62"} Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.741647 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-9v76x" event={"ID":"acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a","Type":"ContainerStarted","Data":"0c627546a3e075c6bc913f0ee5fd0064869872a9df64c4d79f81c1077e290c4a"} Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.741825 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.744755 4953 patch_prober.go:28] interesting pod/console-operator-58897d9998-9v76x container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.744812 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-9v76x" podUID="acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.746731 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" event={"ID":"71033171-b236-43d6-974b-25553eb12ffa","Type":"ContainerStarted","Data":"57a11360368639f839312e66ef8f5143604c0f5b55fd361fc515394ab6ad9b61"} Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.753172 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.753476 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.253463639 +0000 UTC m=+144.186551283 (durationBeforeRetry 500ms). 
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.767847 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" event={"ID":"e5b1df59-2adf-4c42-895b-4218ea5d6aee","Type":"ContainerStarted","Data":"fa13671b94e867e90023030ca69205187ebfee8799ab1b34ef08c393ad6b409c"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.768245 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qp84s"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.774895 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" event={"ID":"8216420e-4379-4b28-9570-9766833d3a54","Type":"ContainerStarted","Data":"f82df55ae47f49cce147cf1924f461e3080fd862695382fb16b5e03a4f013de6"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.776201 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" event={"ID":"78f5dc04-9b51-442a-b90f-59aa2145c73b","Type":"ContainerStarted","Data":"4958023315d89f6ef71535687cbc799ef081b9c8f8ff930e5cc8ad747beac88c"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.777879 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" event={"ID":"28f49c49-96ce-44b8-a402-b5a3c84ee5b4","Type":"ContainerStarted","Data":"c8806244f7768aeeb17db0e6848acdee19d635c0d98decf37eed528a677fc2e5"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.786793 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.790174 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cv9v6" event={"ID":"e2235ac4-5a50-4e61-ac95-f9ce54c104c8","Type":"ContainerStarted","Data":"530d6855488a312a5bc0f238a920a5a7f26b7f5a7427d01f1e75033098cccbf3"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.800092 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" event={"ID":"2db13bfc-b49d-49d5-b055-2befef69d136","Type":"ContainerStarted","Data":"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.800152 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" event={"ID":"2db13bfc-b49d-49d5-b055-2befef69d136","Type":"ContainerStarted","Data":"7241e6aa584f82bd3faa6f540900c87eae3dda1151b59355b9722ce65f0bda83"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.800355 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.801402 4953 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-jpcz9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.801454 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.803109 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" event={"ID":"d55f397e-a527-4b11-ad27-3f3b986d0985","Type":"ContainerStarted","Data":"2f521f4795378f777ba1bfa2daacf692e787bd9c26c2270e5afb14eed9460197"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.809335 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" event={"ID":"8d43eab6-53c3-4e4c-9795-3757f43aa46e","Type":"ContainerStarted","Data":"c6dea521c25120a269c208eac0a650abae12f2a9df918069b60bd0b5a0adecd7"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.812630 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" event={"ID":"def79d52-fc53-4ab1-81c4-d67959b5865f","Type":"ContainerStarted","Data":"46d1416ef8208393306b4f747aef54823277f7b65dbbf41eb286c3250232ce46"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.812677 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" event={"ID":"def79d52-fc53-4ab1-81c4-d67959b5865f","Type":"ContainerStarted","Data":"13dd3777d33de1d407289732c1e0f1c185257c598947e093d98dc183d1d85bd6"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.813337 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.814818 4953 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-6tcb8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.814860 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.816066 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" event={"ID":"26d51156-6dc6-4d83-8d51-f2835538a3a9","Type":"ContainerStarted","Data":"91aa1cec128bd431ff059912ff18f8e580450c89ba31477513ee2617ce6b0299"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.822418 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" event={"ID":"454cbf4b-d2d3-4967-8f93-f47e06b06886","Type":"ContainerStarted","Data":"bf23b891ab5abd781817fbad17576cd04df91cab3a420ca9790753aae3d0f374"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.823997 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" event={"ID":"e05c4996-7333-41b0-b58d-8471886c9e2a","Type":"ContainerStarted","Data":"06acd21c33d5a8a6defed7b98e44d3c60c2f79ceea2241002b21f2711a8371bf"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.829773 4953 generic.go:334] "Generic (PLEG): container finished" podID="eef84f8c-6025-40be-8289-bd7b4c6e5a55" containerID="7296801f432ebd51ef8c87c911fb35769bfb2e90af35221a26cef62f9e423f37" exitCode=0
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.829871 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" event={"ID":"eef84f8c-6025-40be-8289-bd7b4c6e5a55","Type":"ContainerDied","Data":"7296801f432ebd51ef8c87c911fb35769bfb2e90af35221a26cef62f9e423f37"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.832660 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" event={"ID":"48839d24-ad26-4c8c-85dd-822259056c44","Type":"ContainerStarted","Data":"5f7ab6196881311491c7eddd412da586f344c96d3fd55abd26eead642239eb11"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.833987 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" event={"ID":"e8a572a4-1181-4e1e-8169-4c481c6b84b7","Type":"ContainerStarted","Data":"d1592a5dd3396573a7577715fdf7bc64619e04c1de7c54fe70c6099fc60e6621"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.835308 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" event={"ID":"a29cc960-b47b-494a-81bd-617e97aed612","Type":"ContainerStarted","Data":"d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453"}
Oct 11 02:48:52 crc kubenswrapper[4953]: I1011
02:48:52.836087 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" event={"ID":"48e2299c-5d8e-487c-a356-9c131cd9e38d","Type":"ContainerStarted","Data":"4160eb78aae637da17c6c73168596139b25776a474491668738231d66fa69504"} Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.856923 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.858382 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.858557 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.358533484 +0000 UTC m=+144.291621128 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.859580 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.862071 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.362040944 +0000 UTC m=+144.295128768 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.863320 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.867249 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2kzjf"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.870331 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-67499"] Oct 11 02:48:52 crc kubenswrapper[4953]: I1011 02:48:52.961366 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:52 crc kubenswrapper[4953]: E1011 02:48:52.962375 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.462345816 +0000 UTC m=+144.395433460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.042433 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9"] Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.064902 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.065264 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.565247835 +0000 UTC m=+144.498335479 (durationBeforeRetry 500ms). 
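
[Editor's note] All of the MountVolume/UnmountVolume failures above share one root cause: the kubelet cannot find a CSI plugin named kubevirt.io.hostpath-provisioner in its registry, because the csi-hostpathplugin-vlk7d pod that provides it is only now getting a sandbox (see the "No sandbox for pod can be found" entries). Until that plugin registers itself with the kubelet, every operation on this PVC fails fast and is requeued. A minimal client-go sketch for checking what a node has actually registered; this is a hypothetical diagnostic, not part of the kubelet, and assumes a reachable kubeconfig plus the node name "crc" taken from this log:

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Hypothetical diagnostic: list the CSI drivers the node has
    	// registered, to compare against the name in the mount error.
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	client := kubernetes.NewForConfigOrDie(cfg)

    	// The CSINode object mirrors the kubelet's plugin registry for a node.
    	csiNode, err := client.StorageV1().CSINodes().Get(context.Background(), "crc", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, d := range csiNode.Spec.Drivers {
    		// Once csi-hostpathplugin registers, kubevirt.io.hostpath-provisioner
    		// should appear here and the MountDevice retries should succeed.
    		fmt.Println(d.Name)
    	}
    }
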
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.104591 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.133988 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.146477 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-5c675"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.166119 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.166388 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.666304186 +0000 UTC m=+144.599391830 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.166585 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.166972 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.666964273 +0000 UTC m=+144.600051917 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.169089 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.172183 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.175369 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.176391 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-9v76x" podStartSLOduration=122.176352815 podStartE2EDuration="2m2.176352815s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:53.144223838 +0000 UTC m=+144.077311472" watchObservedRunningTime="2025-10-11 02:48:53.176352815 +0000 UTC m=+144.109440479"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.267401 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.268283 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.76824036 +0000 UTC m=+144.701328004 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.318305 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-b9qp4"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.376059 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.376844 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.876561988 +0000 UTC m=+144.809649632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.401844 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7rdbv"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.407382 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts"]
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.410795 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6jhcz"]
Oct 11 02:48:53 crc kubenswrapper[4953]: W1011 02:48:53.481299 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93a1092d_11b9_4b17_bf72_e7296d56dbf3.slice/crio-89de169cc65f5722f617d2638e3b240089977d58cd02052ef0740234cfa22311 WatchSource:0}: Error finding container 89de169cc65f5722f617d2638e3b240089977d58cd02052ef0740234cfa22311: Status 404 returned error can't find the container with id 89de169cc65f5722f617d2638e3b240089977d58cd02052ef0740234cfa22311
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.481995 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.482193 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.982157636 +0000 UTC m=+144.915245270 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.482510 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.482716 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln"]
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.482942 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:53.982932186 +0000 UTC m=+144.916019830 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.515528 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qp84s"]
Oct 11 02:48:53 crc kubenswrapper[4953]: W1011 02:48:53.520002 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19f90a61_22fa_4d99_93b6_72520fed2d71.slice/crio-5e68574e63617a41775c44fcb82bb58c1f705fc7d04437258a2b765ce2cdb769 WatchSource:0}: Error finding container 5e68574e63617a41775c44fcb82bb58c1f705fc7d04437258a2b765ce2cdb769: Status 404 returned error can't find the container with id 5e68574e63617a41775c44fcb82bb58c1f705fc7d04437258a2b765ce2cdb769
Oct 11 02:48:53 crc kubenswrapper[4953]: W1011 02:48:53.527446 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod387ac181_16c9_4d29_ad4e_4c4aca7f294e.slice/crio-13ffea114d1c213adf463a166da720bea45a1b05a29d8aa78825140f6b3ee859 WatchSource:0}: Error finding container 13ffea114d1c213adf463a166da720bea45a1b05a29d8aa78825140f6b3ee859: Status 404 returned error can't find the container with id 13ffea114d1c213adf463a166da720bea45a1b05a29d8aa78825140f6b3ee859
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.550208 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj"]
Oct 11 02:48:53 crc kubenswrapper[4953]: W1011 02:48:53.570355 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf710d8e7_d914_47e6_91e4_abf802875b7c.slice/crio-ae831bf0c9a9ebb18051746da65f60442f73374f02758a317860586487bd6407 WatchSource:0}: Error finding container ae831bf0c9a9ebb18051746da65f60442f73374f02758a317860586487bd6407: Status 404 returned error can't find the container with id ae831bf0c9a9ebb18051746da65f60442f73374f02758a317860586487bd6407
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.587715 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.588152 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.088114753 +0000 UTC m=+145.021202397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.590382 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-vlk7d"]
Oct 11 02:48:53 crc kubenswrapper[4953]: W1011 02:48:53.652434 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc2a69c8_612d_490d_82db_f7d78cdfa38c.slice/crio-d20e167c116367bf736711a5c50c82e715d1a681c1692d31a557d7aaf3ebec35 WatchSource:0}: Error finding container d20e167c116367bf736711a5c50c82e715d1a681c1692d31a557d7aaf3ebec35: Status 404 returned error can't find the container with id d20e167c116367bf736711a5c50c82e715d1a681c1692d31a557d7aaf3ebec35
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.698363 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.699168 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.199148021 +0000 UTC m=+145.132235665 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.784478 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" podStartSLOduration=121.784454156 podStartE2EDuration="2m1.784454156s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:53.755315396 +0000 UTC m=+144.688403070" watchObservedRunningTime="2025-10-11 02:48:53.784454156 +0000 UTC m=+144.717541800"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.799532 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.799777 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.29976022 +0000 UTC m=+145.232847864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.799868 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.800161 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.300153561 +0000 UTC m=+145.233241205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
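
[Editor's note] Note the fixed "(durationBeforeRetry 500ms)" in each failure: nestedpendingoperations.go records, per volume, the earliest time the failed operation may run again, so the reconciler can keep resyncing quickly without hammering a plugin that is not there yet. A stdlib-only sketch of that bookkeeping pattern; the type and field names here are invented for illustration, and the kubelet's real implementation additionally tracks exponential backoff and per-pod operation keys:

    package main

    import (
    	"errors"
    	"fmt"
    	"time"
    )

    // op remembers a recently failed volume operation.
    type op struct {
    	notBefore time.Time // "No retries permitted until ..."
    	lastErr   error
    }

    type operations struct {
    	pending map[string]*op // keyed by volume name
    	backoff time.Duration  // durationBeforeRetry
    }

    // run executes fn unless the volume is still inside its backoff window.
    func (o *operations) run(volume string, fn func() error) error {
    	if p, ok := o.pending[volume]; ok && time.Now().Before(p.notBefore) {
    		return fmt.Errorf("no retries permitted until %s: %w",
    			p.notBefore.Format(time.RFC3339Nano), p.lastErr)
    	}
    	if err := fn(); err != nil {
    		// Arm the backoff window so the next resync fails fast.
    		o.pending[volume] = &op{notBefore: time.Now().Add(o.backoff), lastErr: err}
    		return err
    	}
    	delete(o.pending, volume)
    	return nil
    }

    func main() {
    	ops := &operations{pending: map[string]*op{}, backoff: 500 * time.Millisecond}
    	mount := func() error {
    		return errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
    	}
    	// The reconciler retries on every sync; the backoff gate turns most of
    	// those attempts into the "No retries permitted until" lines in this log.
    	for i := 0; i < 3; i++ {
    		fmt.Println(ops.run("pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8", mount))
    		time.Sleep(200 * time.Millisecond)
    	}
    }

Run against a mount function that always fails, this prints the driver error once and then "no retries permitted until ..." for attempts that land inside the 500ms window, which is the shape of the E1011 entries above.
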
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.845260 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" event={"ID":"a00c4875-8a62-4b96-bc81-d879dff1d91d","Type":"ContainerStarted","Data":"a2edc36776d53f0adeefa4004cdadf0461f25bed385a14a134d91c5579798146"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.865169 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" event={"ID":"78f5dc04-9b51-442a-b90f-59aa2145c73b","Type":"ContainerStarted","Data":"e75fc09ac29aad204391ea15a8ccda7e33344bb95584b437b57128832f44a268"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.878983 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" event={"ID":"bf23c33a-57d1-4671-a0d2-e55556106948","Type":"ContainerStarted","Data":"46088683aebed667607030f1891b4efb87e12adcb5f9dfa71b61c84482ff8961"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.884663 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hq74r" event={"ID":"090fd335-08ca-49b6-beb6-80dc582340a1","Type":"ContainerStarted","Data":"70a8c490afb8945b096298877b567e97cb289d1f72127ba40589045beb53a4c8"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.886307 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cv9v6" event={"ID":"e2235ac4-5a50-4e61-ac95-f9ce54c104c8","Type":"ContainerStarted","Data":"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.899014 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" event={"ID":"48e2299c-5d8e-487c-a356-9c131cd9e38d","Type":"ContainerStarted","Data":"5a06c0fa4a6ab6d08ec7e122c86320af32bc2d71fec42fa26dbc9c3417442e35"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.904265 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.904432 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.404384594 +0000 UTC m=+145.337472238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.904650 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:53 crc kubenswrapper[4953]: E1011 02:48:53.905005 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.404989159 +0000 UTC m=+145.338076803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.910030 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" event={"ID":"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8","Type":"ContainerStarted","Data":"daca11f3de2370903bbd0ce91658beac576c177f77dba955118f711652e9f914"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.917196 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" event={"ID":"e384c720-8dfd-405b-a45d-1182d4fcdd5b","Type":"ContainerStarted","Data":"d269e39960b6500c8a78ced1def9365072ece02165ca68df71db24cbebc33aa2"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.920265 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" event={"ID":"28f49c49-96ce-44b8-a402-b5a3c84ee5b4","Type":"ContainerStarted","Data":"7ceb073cf8d76f6a56d16fae356e6080d4952e46854ed22affb9eee7370949d0"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.926264 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" event={"ID":"80bba440-e5e2-4c21-befa-59ef185a7295","Type":"ContainerStarted","Data":"c780b9f732c094b5b08b69aaf6cb014ba0db2496b83ad8194d3a8c0ed9bea0ef"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.927700 4953 generic.go:334] "Generic (PLEG): container finished" podID="e8a572a4-1181-4e1e-8169-4c481c6b84b7" containerID="45cc6fd1352c9af706a65dc49fe6db403170bba3fe7dcb9647d477d24afdc49b" exitCode=0
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.928103 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" event={"ID":"e8a572a4-1181-4e1e-8169-4c481c6b84b7","Type":"ContainerDied","Data":"45cc6fd1352c9af706a65dc49fe6db403170bba3fe7dcb9647d477d24afdc49b"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.929355 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" event={"ID":"454cbf4b-d2d3-4967-8f93-f47e06b06886","Type":"ContainerStarted","Data":"27cd5d8b941fb0c4e9677cd89e9e08923acf323d9ae977a412430c94022942ad"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.930630 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" event={"ID":"e05c4996-7333-41b0-b58d-8471886c9e2a","Type":"ContainerStarted","Data":"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.931049 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.932312 4953 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-s5p48 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused" start-of-body=
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.932385 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.941284 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" event={"ID":"d55f397e-a527-4b11-ad27-3f3b986d0985","Type":"ContainerStarted","Data":"b6b51b111f772f4848faa8d76cd70b1fe9c155fd994f1b489bfa2c9959aa8265"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.951959 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" event={"ID":"e385b9db-8b22-4cdc-95bc-595bc976ac27","Type":"ContainerStarted","Data":"0f87955973104ca6fdd696cc3923dce3b5c8e4e25b80175985ffa6b41d0561bb"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.957837 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" event={"ID":"8d43eab6-53c3-4e4c-9795-3757f43aa46e","Type":"ContainerStarted","Data":"c2b23e9b731932b13c145da8a99266e76c404a7432079ac7e1c3681ebadc5acd"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.966101 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" event={"ID":"1248ce56-3a40-4896-8df6-453d36f01910","Type":"ContainerStarted","Data":"92d156c83420367f44395164cd67f4ccd4c76f4ff896f85fc3c5e172b3ce2111"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.967827 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" event={"ID":"9c3c1bc5-1fc0-4646-979e-33b5a967b866","Type":"ContainerStarted","Data":"131d88d830890ce0c86c72e5a4be2f970ad4676999ac1de7e2280724841ec67f"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.968979 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" event={"ID":"cc2a69c8-612d-490d-82db-f7d78cdfa38c","Type":"ContainerStarted","Data":"d20e167c116367bf736711a5c50c82e715d1a681c1692d31a557d7aaf3ebec35"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.970051 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" event={"ID":"d30ad7f6-c8b7-4466-bbf9-00a1ab119270","Type":"ContainerStarted","Data":"62ea9bfae687fe8fa6fc52c1fc8ba8fb3c13a24fdefb883f3809a5ad50f2eeb3"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.972329 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" event={"ID":"8216420e-4379-4b28-9570-9766833d3a54","Type":"ContainerStarted","Data":"68449d09b15501fe333f8046c7dead648e695c92c4d7dbe1e19349e28a3a817e"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.974762 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-b9qp4" event={"ID":"19f90a61-22fa-4d99-93b6-72520fed2d71","Type":"ContainerStarted","Data":"5e68574e63617a41775c44fcb82bb58c1f705fc7d04437258a2b765ce2cdb769"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.977495 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" event={"ID":"a29cc960-b47b-494a-81bd-617e97aed612","Type":"ContainerStarted","Data":"488ce46fe2029dc1801f904b60647beeb54bf858b61e45f2a08105a49372a591"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.978894 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" event={"ID":"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b","Type":"ContainerStarted","Data":"f7c889cf5348359ab484517d5350333edd7c6b8c53c3d6c02b5a9d67fcb8ba4f"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.981042 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" event={"ID":"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1","Type":"ContainerStarted","Data":"65e39638e497a8520bc0b9f0ea866f3fe59af1942c3034447fd1f6945f851fe0"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.982861 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rzpfj" event={"ID":"d61d0811-775b-42d0-b605-f0d6a8b2dbce","Type":"ContainerStarted","Data":"acde6fd307d82c369da496956f4185b0bdd7d4bb427c629eb03f9ef1aa6eeb0c"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.982914 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rzpfj" event={"ID":"d61d0811-775b-42d0-b605-f0d6a8b2dbce","Type":"ContainerStarted","Data":"caadd5eb723a94f717027f9549df0a010d0473338ca92034c82338c1596e5a34"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.985752 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" event={"ID":"26d51156-6dc6-4d83-8d51-f2835538a3a9","Type":"ContainerStarted","Data":"7d99427daf2463cc3958d6288931f97a268a9df6e4d5d01084dd6b265b4216bb"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.986731 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.989284 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" event={"ID":"48839d24-ad26-4c8c-85dd-822259056c44","Type":"ContainerStarted","Data":"6e391cf937f7dc9a886fc8580040129f97ad51e2eef12cc90f685e9bbca088d1"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.990068 4953 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8zbjh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body=
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.990144 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused"
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.991162 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qp84s" event={"ID":"f710d8e7-d914-47e6-91e4-abf802875b7c","Type":"ContainerStarted","Data":"ae831bf0c9a9ebb18051746da65f60442f73374f02758a317860586487bd6407"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.991916 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" event={"ID":"387ac181-16c9-4d29-ad4e-4c4aca7f294e","Type":"ContainerStarted","Data":"13ffea114d1c213adf463a166da720bea45a1b05a29d8aa78825140f6b3ee859"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.992578 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" event={"ID":"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c","Type":"ContainerStarted","Data":"3ab75372e4a8ccd82d83eced78590126ff7bfa5ef59b44c8e63859befa922b5c"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.993658 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" event={"ID":"4ed84470-d800-4f9a-b387-bf40ddfbc70e","Type":"ContainerStarted","Data":"455f7a8101e1a1c92fa5eb5ce2dee5497b4eb8731d6f7715f09ed997c648c044"}
Oct 11 02:48:53 crc kubenswrapper[4953]: I1011 02:48:53.996250 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7rdbv" event={"ID":"93a1092d-11b9-4b17-bf72-e7296d56dbf3","Type":"ContainerStarted","Data":"89de169cc65f5722f617d2638e3b240089977d58cd02052ef0740234cfa22311"}
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.002768 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" event={"ID":"66c99208-eaa2-43e2-a1ef-19a8fd996e1f","Type":"ContainerStarted","Data":"76143a51197482f6fbed8c514ca875fdb73bf2a4be61e5f1c9622acbde3e1d5e"}
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.007704 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" event={"ID":"d3d36f1a-0429-4531-a820-10cd42bf7414","Type":"ContainerStarted","Data":"2cbb3f0a023a482ae3b8f96e3285442040c102f1d872a1c7f1d7b25a336400b9"}
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008703 4953 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-6tcb8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008739 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008817 4953 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-jpcz9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008870 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008961 4953 patch_prober.go:28] interesting pod/console-operator-58897d9998-9v76x container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.008974 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-9v76x" podUID="acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.009430 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.009526 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.509505479 +0000 UTC m=+145.442593123 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
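
[Editor's note] The readiness-probe failures sprinkled through this window (controller-manager, route-controller-manager, console-operator, oauth-openshift, marketplace-operator, all "connect: connection refused") are the ordinary startup pattern: the container process is running but has not bound its serving port yet, so the kubelet marks the pod unready and probes again on the next period. A rough stdlib sketch of such an HTTP readiness check; the endpoint is taken from the log, the one-second period is illustrative, and the real prober also applies timeouts, success/failure thresholds, and pod lifecycle rules. To my knowledge the kubelet's HTTPS probes do not verify the serving certificate, hence InsecureSkipVerify here:

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"net/http"
    	"time"
    )

    func main() {
    	// In-cluster serving certs are not verifiable from outside, so this
    	// illustration skips verification, as the kubelet's HTTPS probe does.
    	client := &http.Client{
    		Timeout:   time.Second,
    		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
    	}
    	for {
    		resp, err := client.Get("https://10.217.0.5:8443/healthz")
    		if err != nil {
    			// e.g. "dial tcp 10.217.0.5:8443: connect: connection refused"
    			// while controller-manager is still binding its port.
    			fmt.Println("probe failed:", err)
    			time.Sleep(time.Second)
    			continue
    		}
    		resp.Body.Close()
    		if resp.StatusCode == http.StatusOK {
    			fmt.Println("ready")
    			return
    		}
    		fmt.Println("not ready, status:", resp.StatusCode)
    		time.Sleep(time.Second)
    	}
    }
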
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.009670 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.010068 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.510057934 +0000 UTC m=+145.443145578 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.108118 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" podStartSLOduration=123.108091627 podStartE2EDuration="2m3.108091627s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.105448999 +0000 UTC m=+145.038536643" watchObservedRunningTime="2025-10-11 02:48:54.108091627 +0000 UTC m=+145.041179271"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.112086 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.112205 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.612184603 +0000 UTC m=+145.545272247 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.112450 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.114009 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.613992149 +0000 UTC m=+145.547079793 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.162037 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-d8xwl" podStartSLOduration=123.162006145 podStartE2EDuration="2m3.162006145s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.160045884 +0000 UTC m=+145.093133528" watchObservedRunningTime="2025-10-11 02:48:54.162006145 +0000 UTC m=+145.095093779"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.216853 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.217118 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.717081803 +0000 UTC m=+145.650169447 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.217592 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.218069 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.718056898 +0000 UTC m=+145.651144722 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.228852 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-7gjs6" podStartSLOduration=123.228830665 podStartE2EDuration="2m3.228830665s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.227985943 +0000 UTC m=+145.161073597" watchObservedRunningTime="2025-10-11 02:48:54.228830665 +0000 UTC m=+145.161918329"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.271291 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" podStartSLOduration=123.271270187 podStartE2EDuration="2m3.271270187s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.268064225 +0000 UTC m=+145.201151869" watchObservedRunningTime="2025-10-11 02:48:54.271270187 +0000 UTC m=+145.204357831"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.316363 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-cv9v6" podStartSLOduration=123.316318627 podStartE2EDuration="2m3.316318627s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.315081455 +0000 UTC m=+145.248169119" watchObservedRunningTime="2025-10-11 02:48:54.316318627 +0000 UTC m=+145.249406281"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.318734 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.319793 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.819775196 +0000 UTC m=+145.752862840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.353443 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-kb2tv" podStartSLOduration=123.353418302 podStartE2EDuration="2m3.353418302s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.350889277 +0000 UTC m=+145.283976911" watchObservedRunningTime="2025-10-11 02:48:54.353418302 +0000 UTC m=+145.286505936"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.398851 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" podStartSLOduration=122.398822421 podStartE2EDuration="2m2.398822421s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.394289784 +0000 UTC m=+145.327377438" watchObservedRunningTime="2025-10-11 02:48:54.398822421 +0000 UTC m=+145.331910065"
Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.420752 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.421235 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:54.921216627 +0000 UTC m=+145.854304281 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.432447 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" podStartSLOduration=123.432426546 podStartE2EDuration="2m3.432426546s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.43142354 +0000 UTC m=+145.364511184" watchObservedRunningTime="2025-10-11 02:48:54.432426546 +0000 UTC m=+145.365514190" Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.470740 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-ttfpq" podStartSLOduration=123.470714781 podStartE2EDuration="2m3.470714781s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.467705764 +0000 UTC m=+145.400793418" watchObservedRunningTime="2025-10-11 02:48:54.470714781 +0000 UTC m=+145.403802425" Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.531215 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.531983 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.031964898 +0000 UTC m=+145.965052542 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.554921 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-rzpfj" podStartSLOduration=5.5548963879999995 podStartE2EDuration="5.554896388s" podCreationTimestamp="2025-10-11 02:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.551437889 +0000 UTC m=+145.484525533" watchObservedRunningTime="2025-10-11 02:48:54.554896388 +0000 UTC m=+145.487984032" Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.556233 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zx8gt" podStartSLOduration=122.556224692 podStartE2EDuration="2m2.556224692s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:54.513724448 +0000 UTC m=+145.446812102" watchObservedRunningTime="2025-10-11 02:48:54.556224692 +0000 UTC m=+145.489312336" Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.633539 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.634040 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.134019505 +0000 UTC m=+146.067107149 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.734991 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.735242 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.235206539 +0000 UTC m=+146.168294173 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.735777 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.736107 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.236093582 +0000 UTC m=+146.169181226 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.837142 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.837409 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.337372879 +0000 UTC m=+146.270460523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.837783 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.838423 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.338407526 +0000 UTC m=+146.271495170 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.939222 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.939536 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.439471427 +0000 UTC m=+146.372559061 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:54 crc kubenswrapper[4953]: I1011 02:48:54.939924 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:54 crc kubenswrapper[4953]: E1011 02:48:54.940285 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.440276778 +0000 UTC m=+146.373364422 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.013876 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" event={"ID":"d3d36f1a-0429-4531-a820-10cd42bf7414","Type":"ContainerStarted","Data":"b521e46daec5da021f68f80248ea6fb8156d93471382177518af1f3a1fbc17cc"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.015989 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" event={"ID":"9c3c1bc5-1fc0-4646-979e-33b5a967b866","Type":"ContainerStarted","Data":"59c8b7576d7187f21dc485441986558af7b6e566d21924dd30ec77b65e451714"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.020592 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" event={"ID":"e5b1df59-2adf-4c42-895b-4218ea5d6aee","Type":"ContainerStarted","Data":"a12b4096084e4aecea95e2417c036464433f512056706d54cea6fa918984aa8f"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.022573 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" event={"ID":"d811aeaa-e286-44d5-a570-8b0f0b6dfc0c","Type":"ContainerStarted","Data":"c4f02814f282939460378ddb3ca3db6c5631ff5251a0b379e688acc59c621b19"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.026151 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" event={"ID":"71033171-b236-43d6-974b-25553eb12ffa","Type":"ContainerStarted","Data":"4451ed53e589ef7fa3400f100fdca2cf7212e00031be98b6a52fa1f5018460a3"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.027729 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" event={"ID":"80bba440-e5e2-4c21-befa-59ef185a7295","Type":"ContainerStarted","Data":"cd2429cb1b72049ea33db9b4d8741921e0824cbe951dd536932bffbc73bb81a0"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.030659 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" event={"ID":"eef84f8c-6025-40be-8289-bd7b4c6e5a55","Type":"ContainerStarted","Data":"72a03a5c833a03934859e99a9451832627d9f428c0dff523fb0a0fca72eddfbf"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.032678 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" event={"ID":"387ac181-16c9-4d29-ad4e-4c4aca7f294e","Type":"ContainerStarted","Data":"897692ae536aa78fd81ac63a8082a9d4072538f9ef7f80b1704f66f934915db9"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.037334 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-j7rs7" podStartSLOduration=124.037320346 podStartE2EDuration="2m4.037320346s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:55.034054422 +0000 UTC m=+145.967142066" watchObservedRunningTime="2025-10-11 02:48:55.037320346 +0000 UTC m=+145.970407990" Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.038020 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" event={"ID":"66c99208-eaa2-43e2-a1ef-19a8fd996e1f","Type":"ContainerStarted","Data":"47229c81d057fffc0cb4e34cd25e712afefdd29f7f2d56054237589b4f7d7ed4"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.040831 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.041324 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.541300788 +0000 UTC m=+146.474388442 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.042125 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" event={"ID":"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1","Type":"ContainerStarted","Data":"6604fc23178e21538489df4239cfcf436b46900b9925cd93622ce156f93fd851"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.044466 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" event={"ID":"e385b9db-8b22-4cdc-95bc-595bc976ac27","Type":"ContainerStarted","Data":"9f7959c8d62760ff0be914f48a4f5d69e22b47de85023b9ae9aa124851c1e21f"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.046176 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" event={"ID":"1248ce56-3a40-4896-8df6-453d36f01910","Type":"ContainerStarted","Data":"e13a044238f81ad9dfcf8b6c686cd0213e67556fd8654449250392608d8f12b1"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.047879 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" event={"ID":"a00c4875-8a62-4b96-bc81-d879dff1d91d","Type":"ContainerStarted","Data":"607a02a231907dc42dbdf399e4aa9dc9f1dcf6b2e0a822d66f31765e039de2e3"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.049176 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" 
event={"ID":"84eff6bb-f4c1-41f5-97d5-0b28a8681c1b","Type":"ContainerStarted","Data":"832c20440b20c98102af7ba10b2e350a62e2f59d22e5dc79758d64d89622d3c3"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.051110 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" event={"ID":"bf23c33a-57d1-4671-a0d2-e55556106948","Type":"ContainerStarted","Data":"27fec09a15a3cc88f2e1c08f941b7bf23f27cc711056c061e473d772cc997019"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.060304 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-2kzjf" podStartSLOduration=124.060265787 podStartE2EDuration="2m4.060265787s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:55.055760081 +0000 UTC m=+145.988847785" watchObservedRunningTime="2025-10-11 02:48:55.060265787 +0000 UTC m=+145.993353451" Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.067838 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-hq74r" event={"ID":"090fd335-08ca-49b6-beb6-80dc582340a1","Type":"ContainerStarted","Data":"49ad8bfa3f5acd2d29c17f887bf619336f32ed7e7c1654b58dac608930c3bdff"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.071597 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-b9qp4" event={"ID":"19f90a61-22fa-4d99-93b6-72520fed2d71","Type":"ContainerStarted","Data":"489752e6677aae951805993858fb861f1702eb4e37a08fa10356bcbb341e0cfd"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.074936 4953 generic.go:334] "Generic (PLEG): container finished" podID="8d43eab6-53c3-4e4c-9795-3757f43aa46e" containerID="c2b23e9b731932b13c145da8a99266e76c404a7432079ac7e1c3681ebadc5acd" exitCode=0 Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.075204 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" event={"ID":"8d43eab6-53c3-4e4c-9795-3757f43aa46e","Type":"ContainerDied","Data":"c2b23e9b731932b13c145da8a99266e76c404a7432079ac7e1c3681ebadc5acd"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.077872 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" event={"ID":"d30ad7f6-c8b7-4466-bbf9-00a1ab119270","Type":"ContainerStarted","Data":"1efbb1fc5e5a4b22978201323d09c11b805a50364c09ac5477405bb2730d5124"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.085693 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" event={"ID":"78f5dc04-9b51-442a-b90f-59aa2145c73b","Type":"ContainerStarted","Data":"b2836502a6456f109017d1ca69de17483508bda0e3d41620b18c8f999d58161e"} Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.086324 4953 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8zbjh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" start-of-body= Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.086378 4953 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.19:8080/healthz\": dial tcp 10.217.0.19:8080: connect: connection refused" Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.086970 4953 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-s5p48 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused" start-of-body= Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.087018 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused" Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.087341 4953 patch_prober.go:28] interesting pod/console-operator-58897d9998-9v76x container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.087358 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-9v76x" podUID="acf3d6fa-3a9a-4c35-bdbf-f15d10ed3a0a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.143231 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.145043 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.645026568 +0000 UTC m=+146.578114402 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.244388 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.245829 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.745805703 +0000 UTC m=+146.678893347 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.350210 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.350559 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.850545799 +0000 UTC m=+146.783633443 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.451457 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.451783 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.951748344 +0000 UTC m=+146.884835988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.452585 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.453021 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:55.953009846 +0000 UTC m=+146.886097680 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.553408 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.553592 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.053558364 +0000 UTC m=+146.986646008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.553805 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.554409 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.054396306 +0000 UTC m=+146.987483950 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.654564 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.654945 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.154908173 +0000 UTC m=+147.087995857 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.757011 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.757739 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.257725 +0000 UTC m=+147.190812644 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.858849 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.859264 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.359241283 +0000 UTC m=+147.292328927 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.859432 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.860047 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.360023023 +0000 UTC m=+147.293110667 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.960483 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.960715 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.460681814 +0000 UTC m=+147.393769468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:55 crc kubenswrapper[4953]: I1011 02:48:55.960895 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:55 crc kubenswrapper[4953]: E1011 02:48:55.961470 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.461455294 +0000 UTC m=+147.394542938 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.061901 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.062533 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.562510635 +0000 UTC m=+147.495598279 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.062631 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.063190 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.562991607 +0000 UTC m=+147.496079251 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.094038 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" event={"ID":"5d1d1cd4-d0eb-4067-ae21-c0e547df65d8","Type":"ContainerStarted","Data":"a0d2c49540614522848b4bd88227be1e236643234808ca86dd7fdc63997f0d02"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.096349 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" event={"ID":"1248ce56-3a40-4896-8df6-453d36f01910","Type":"ContainerStarted","Data":"60ba25c81e0589408c957f4c626a90a7f07494624c111e37693e184c86f5c0b1"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.098627 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qp84s" event={"ID":"f710d8e7-d914-47e6-91e4-abf802875b7c","Type":"ContainerStarted","Data":"84ce27b4dd6060995dd65a03953c58a8967c6efdf982a57b75b373a25101b6f6"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.098653 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qp84s" event={"ID":"f710d8e7-d914-47e6-91e4-abf802875b7c","Type":"ContainerStarted","Data":"5e9d297f394b2ab82f9e4f073487b18605d3952a88e3d2ae29001e7f497b07cf"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.098745 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-qp84s"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.100777 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" event={"ID":"8d43eab6-53c3-4e4c-9795-3757f43aa46e","Type":"ContainerStarted","Data":"4722f865ea5aed5c539f29eaa5444e61526ebc900ff0c11760b2939f3d9f06a4"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.100889 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.103800 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" event={"ID":"eef84f8c-6025-40be-8289-bd7b4c6e5a55","Type":"ContainerStarted","Data":"adb85bbb39a5169ec2f047d8e64fc9dc7186faf6337332284b18eb57b70371a3"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.104990 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" event={"ID":"4ed84470-d800-4f9a-b387-bf40ddfbc70e","Type":"ContainerStarted","Data":"949ea58605c121f8ec0b9cac01f3ef30defc21d39e701d4a3c460916620dfff8"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.105623 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.107311 4953 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4wgln container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused" start-of-body=
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.107360 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" podUID="4ed84470-d800-4f9a-b387-bf40ddfbc70e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.108281 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" event={"ID":"bf23c33a-57d1-4671-a0d2-e55556106948","Type":"ContainerStarted","Data":"6936600567bef529c19f760d69d875ceaf1f39b87defe2a04c7d0e2edc03ca05"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.110550 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" event={"ID":"a00c4875-8a62-4b96-bc81-d879dff1d91d","Type":"ContainerStarted","Data":"2253ae0cca3b17f30c5dd47baba09cbadb5761a2613bf3d3de9ff8107b77c425"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.110762 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.112566 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" event={"ID":"e8a572a4-1181-4e1e-8169-4c481c6b84b7","Type":"ContainerStarted","Data":"3024fcc03db951036f4ebce4ae34362096771ff0d2cc35dc612ce43f3de15b06"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.113977 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" event={"ID":"4d13fcb2-aa07-45c5-ae90-bac982fa0bd1","Type":"ContainerStarted","Data":"557101b1fafa610045052327306067c8b56bbb4b76a0f2a91b38f22864889dfd"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.115375 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" event={"ID":"71033171-b236-43d6-974b-25553eb12ffa","Type":"ContainerStarted","Data":"fdc68eec1d6d30c26a14bb81686d77e431dad1c14dcbc7fa5e299d4a1146844c"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.116872 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" event={"ID":"48839d24-ad26-4c8c-85dd-822259056c44","Type":"ContainerStarted","Data":"0fc1f3adb10ba565096897d12195e0350949ec59beb5f066081a29c7517d0aa4"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.119011 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" event={"ID":"454cbf4b-d2d3-4967-8f93-f47e06b06886","Type":"ContainerStarted","Data":"067c2b64a97094ed60c8f21b20e8dde3381f92d89f2e9e1cc05b0aa21507aae6"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.120406 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7rdbv" event={"ID":"93a1092d-11b9-4b17-bf72-e7296d56dbf3","Type":"ContainerStarted","Data":"31ea7b80a20e4cac173e2dee66e3f473c16088ada35609938e16f94ae6221ef5"}
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.136399 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.137504 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.137854 4953 patch_prober.go:28] interesting pod/apiserver-76f77b778f-t5tdt container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body=
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.138042 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" podUID="eef84f8c-6025-40be-8289-bd7b4c6e5a55" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.7:8443/livez\": dial tcp 10.217.0.7:8443: connect: connection refused"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.159547 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6jhcz" podStartSLOduration=124.159524162 podStartE2EDuration="2m4.159524162s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.133727908 +0000 UTC m=+147.066815562" watchObservedRunningTime="2025-10-11 02:48:56.159524162 +0000 UTC m=+147.092611806"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.160304 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-hq74r" podStartSLOduration=125.160298222 podStartE2EDuration="2m5.160298222s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.157286655 +0000 UTC m=+147.090374299" watchObservedRunningTime="2025-10-11 02:48:56.160298222 +0000 UTC m=+147.093385866"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.163626 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.165070 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.665056625 +0000 UTC m=+147.598144269 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.187309 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-b9qp4" podStartSLOduration=7.187283517 podStartE2EDuration="7.187283517s" podCreationTimestamp="2025-10-11 02:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.185940232 +0000 UTC m=+147.119027876" watchObservedRunningTime="2025-10-11 02:48:56.187283517 +0000 UTC m=+147.120371151"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.234809 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hxx9r" podStartSLOduration=124.234781639 podStartE2EDuration="2m4.234781639s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.222552155 +0000 UTC m=+147.155639799" watchObservedRunningTime="2025-10-11 02:48:56.234781639 +0000 UTC m=+147.167869283"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.259420 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" podStartSLOduration=124.259395953 podStartE2EDuration="2m4.259395953s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.257536345 +0000 UTC m=+147.190623989" watchObservedRunningTime="2025-10-11 02:48:56.259395953 +0000 UTC m=+147.192483597"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.265236 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.268416 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.768398365 +0000 UTC m=+147.701486009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.292267 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-qp84s" podStartSLOduration=7.292245069 podStartE2EDuration="7.292245069s" podCreationTimestamp="2025-10-11 02:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.290046832 +0000 UTC m=+147.223134476" watchObservedRunningTime="2025-10-11 02:48:56.292245069 +0000 UTC m=+147.225332713"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.317897 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.318355 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.319971 4953 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-jh2h9 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.11:8443/livez\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.320019 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" podUID="e8a572a4-1181-4e1e-8169-4c481c6b84b7" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.11:8443/livez\": dial tcp 10.217.0.11:8443: connect: connection refused"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.355992 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" podStartSLOduration=125.355970839 podStartE2EDuration="2m5.355970839s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.329097937 +0000 UTC m=+147.262185591" watchObservedRunningTime="2025-10-11 02:48:56.355970839 +0000 UTC m=+147.289058483"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.366868 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.367305 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.8672854 +0000 UTC m=+147.800373044 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.449530 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dbxsb" podStartSLOduration=124.449505816 podStartE2EDuration="2m4.449505816s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.358716889 +0000 UTC m=+147.291804533" watchObservedRunningTime="2025-10-11 02:48:56.449505816 +0000 UTC m=+147.382593460"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.469379 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.469976 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:56.969958643 +0000 UTC m=+147.903046287 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.570665 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.570923 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.070890951 +0000 UTC m=+148.003978585 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.571030 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.571403 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.071388064 +0000 UTC m=+148.004475708 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.620046 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fg6dt" podStartSLOduration=124.620022826 podStartE2EDuration="2m4.620022826s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.454641549 +0000 UTC m=+147.387729193" watchObservedRunningTime="2025-10-11 02:48:56.620022826 +0000 UTC m=+147.553110470"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.620668 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" podStartSLOduration=124.620662542 podStartE2EDuration="2m4.620662542s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.619515643 +0000 UTC m=+147.552603297" watchObservedRunningTime="2025-10-11 02:48:56.620662542 +0000 UTC m=+147.553750186"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.648548 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xcnpf" podStartSLOduration=124.648526559 podStartE2EDuration="2m4.648526559s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.646564979 +0000 UTC m=+147.579652623" watchObservedRunningTime="2025-10-11 02:48:56.648526559 +0000 UTC m=+147.581614203"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.672909 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.673393 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.173373209 +0000 UTC m=+148.106460843 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.712522 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-5c675" podStartSLOduration=124.712496326 podStartE2EDuration="2m4.712496326s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.688362945 +0000 UTC m=+147.621450589" watchObservedRunningTime="2025-10-11 02:48:56.712496326 +0000 UTC m=+147.645583970"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.715271 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-lnwjz" podStartSLOduration=125.715250347 podStartE2EDuration="2m5.715250347s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.711852889 +0000 UTC m=+147.644940533" watchObservedRunningTime="2025-10-11 02:48:56.715250347 +0000 UTC m=+147.648337991"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.733092 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qpr8t" podStartSLOduration=124.733065505 podStartE2EDuration="2m4.733065505s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.729970886 +0000 UTC m=+147.663058540" watchObservedRunningTime="2025-10-11 02:48:56.733065505 +0000 UTC m=+147.666153149"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.761189 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" podStartSLOduration=124.761169739 podStartE2EDuration="2m4.761169739s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.759356172 +0000 UTC m=+147.692443816" watchObservedRunningTime="2025-10-11 02:48:56.761169739 +0000 UTC m=+147.694257383"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.775411 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.776197 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.276180315 +0000 UTC m=+148.209267959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.800834 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" podStartSLOduration=125.800808069 podStartE2EDuration="2m5.800808069s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.799057794 +0000 UTC m=+147.732145448" watchObservedRunningTime="2025-10-11 02:48:56.800808069 +0000 UTC m=+147.733895713"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.823727 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-67499" podStartSLOduration=124.823707209 podStartE2EDuration="2m4.823707209s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.82219352 +0000 UTC m=+147.755281184" watchObservedRunningTime="2025-10-11 02:48:56.823707209 +0000 UTC m=+147.756794853"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.876847 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.877380 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.377346699 +0000 UTC m=+148.310434343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.877706 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.878093 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.378079928 +0000 UTC m=+148.311167572 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.900311 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2xxts" podStartSLOduration=125.90029153 podStartE2EDuration="2m5.90029153s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.856261107 +0000 UTC m=+147.789348761" watchObservedRunningTime="2025-10-11 02:48:56.90029153 +0000 UTC m=+147.833379174"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.900457 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" podStartSLOduration=124.900453754 podStartE2EDuration="2m4.900453754s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.89173833 +0000 UTC m=+147.824825974" watchObservedRunningTime="2025-10-11 02:48:56.900453754 +0000 UTC m=+147.833541398"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.945420 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" podStartSLOduration=124.945398701 podStartE2EDuration="2m4.945398701s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.924148024 +0000 UTC m=+147.857235668" watchObservedRunningTime="2025-10-11 02:48:56.945398701 +0000 UTC m=+147.878486345"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.947116 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7rdbv" podStartSLOduration=125.947108805 podStartE2EDuration="2m5.947108805s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.943941304 +0000 UTC m=+147.877028948" watchObservedRunningTime="2025-10-11 02:48:56.947108805 +0000 UTC m=+147.880196449"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.974105 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-5t2t2" podStartSLOduration=125.974081219 podStartE2EDuration="2m5.974081219s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:56.971251867 +0000 UTC m=+147.904339511" watchObservedRunningTime="2025-10-11 02:48:56.974081219 +0000 UTC m=+147.907168863"
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.979248 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.979413 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.479384556 +0000 UTC m=+148.412472200 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:56 crc kubenswrapper[4953]: I1011 02:48:56.979601 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:56 crc kubenswrapper[4953]: E1011 02:48:56.979964 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.479956831 +0000 UTC m=+148.413044475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.007126 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-tr9hs" podStartSLOduration=126.007105279 podStartE2EDuration="2m6.007105279s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:57.005461897 +0000 UTC m=+147.938549541" watchObservedRunningTime="2025-10-11 02:48:57.007105279 +0000 UTC m=+147.940192923"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.080593 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.080985 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.580967161 +0000 UTC m=+148.514054805 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.094294 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-hq74r"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.099745 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 11 02:48:57 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld
Oct 11 02:48:57 crc kubenswrapper[4953]: [+]process-running ok
Oct 11 02:48:57 crc kubenswrapper[4953]: healthz check failed
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.099802 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.113737 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-qcfl9" podStartSLOduration=125.113713034 podStartE2EDuration="2m5.113713034s" podCreationTimestamp="2025-10-11 02:46:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:57.087182391 +0000 UTC m=+148.020270035" watchObservedRunningTime="2025-10-11 02:48:57.113713034 +0000 UTC m=+148.046800678"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.115415 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-t4qhc" podStartSLOduration=126.115400467 podStartE2EDuration="2m6.115400467s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:48:57.112838431 +0000 UTC m=+148.045926075" watchObservedRunningTime="2025-10-11 02:48:57.115400467 +0000 UTC m=+148.048488111"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.127707 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" event={"ID":"cc2a69c8-612d-490d-82db-f7d78cdfa38c","Type":"ContainerStarted","Data":"07fe28f47d73fe667701411ba0ee4dcc76c954eb9ea25dc852eb528e254bf988"}
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.128533 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7rdbv"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.128815 4953 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4wgln container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused" start-of-body=
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.129215 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" podUID="4ed84470-d800-4f9a-b387-bf40ddfbc70e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.31:5443/healthz\": dial tcp 10.217.0.31:5443: connect: connection refused"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.130278 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body=
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.130334 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.182046 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.182448 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.682432841 +0000 UTC m=+148.615520485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.282979 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.283191 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.783157454 +0000 UTC m=+148.716245098 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.284504 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.286926 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.786904421 +0000 UTC m=+148.719992065 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.386756 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.88672865 +0000 UTC m=+148.819816294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.386805 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.387225 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.387670 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.887627013 +0000 UTC m=+148.820714657 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.488858 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.489297 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.98927703 +0000 UTC m=+148.922364674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.489891 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.490337 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:57.990326747 +0000 UTC m=+148.923414391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.591866 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.592169 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.592202 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.592264 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.592329 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.592509 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.092478056 +0000 UTC m=+149.025565840 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.599780 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.605438 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.607754 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.620742 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.693941 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.694339 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.194327408 +0000 UTC m=+149.127415052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.795822 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.796046 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.296015275 +0000 UTC m=+149.229102919 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.796558 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.797187 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.297150105 +0000 UTC m=+149.230237739 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.808256 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.814655 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.898899 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.899083 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.399050288 +0000 UTC m=+149.332137932 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.899234 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:57 crc kubenswrapper[4953]: E1011 02:48:57.899727 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.399715935 +0000 UTC m=+149.332803579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:57 crc kubenswrapper[4953]: I1011 02:48:57.906382 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.000322 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.000572 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.50054048 +0000 UTC m=+149.433628134 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.000802 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.001143 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.501129765 +0000 UTC m=+149.434217409 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.097855 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 11 02:48:58 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld
Oct 11 02:48:58 crc kubenswrapper[4953]: [+]process-running ok
Oct 11 02:48:58 crc kubenswrapper[4953]: healthz check failed
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.097942 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.102318 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.102660 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.602629148 +0000 UTC m=+149.535716792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.134626 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body=
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.134705 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused"
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.204010 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp"
Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.205014 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.704992573 +0000 UTC m=+149.638080377 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.307438 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.309211 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.809186285 +0000 UTC m=+149.742273929 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.408922 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.409243 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:58.90923089 +0000 UTC m=+149.842318534 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.511328 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.511935 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.011916503 +0000 UTC m=+149.945004147 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.512373 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.512725 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.012717514 +0000 UTC m=+149.945805158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.566875 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4wgln" Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.616423 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.617127 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.1170799 +0000 UTC m=+150.050167544 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.718697 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.719068 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.219053695 +0000 UTC m=+150.152141339 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.820361 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.820587 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.320547728 +0000 UTC m=+150.253635372 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.821120 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.821659 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.321644086 +0000 UTC m=+150.254731730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:58 crc kubenswrapper[4953]: I1011 02:48:58.922400 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:58 crc kubenswrapper[4953]: E1011 02:48:58.922761 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.422738168 +0000 UTC m=+150.355825812 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.025634 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.026159 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.526136249 +0000 UTC m=+150.459223893 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.099299 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:48:59 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:48:59 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:48:59 crc kubenswrapper[4953]: healthz check failed Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.099780 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.116919 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.118059 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.121935 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.126375 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.126551 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.626525164 +0000 UTC m=+150.559612808 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.126706 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.127083 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.627069688 +0000 UTC m=+150.560157332 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.139409 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ea7455f3e9038f2a493df71a01a9895f30da23329e44a7388b4971298208b1a2"} Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.143883 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"609d4c6c5ac33189e4aa44bf85cdcd3e7d628d4f9a77603086ec0bf3f1d8d836"} Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.147418 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.160036 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"774ead0c28efc4a75c3b75fe1cbd4d40ba19092a7b63532874f4a01e798af1d5"} Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.227512 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.227911 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.228193 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.228231 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2x9t\" (UniqueName: \"kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.228375 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.728353825 +0000 UTC m=+150.661441469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.329953 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.330015 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2x9t\" (UniqueName: \"kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.330042 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.330088 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.331211 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.331620 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.331715 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.831696415 +0000 UTC m=+150.764784059 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.346443 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.347742 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.353102 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.353760 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.386241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2x9t\" (UniqueName: \"kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t\") pod \"certified-operators-b6569\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") " pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432169 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432637 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.432386 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.932346886 +0000 UTC m=+150.865434530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432745 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432826 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6cnz\" (UniqueName: \"kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432852 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.432896 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.433300 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:48:59.93328955 +0000 UTC m=+150.866377194 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.534180 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.534473 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6cnz\" (UniqueName: \"kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.534518 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.534542 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.535154 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.535625 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.035591053 +0000 UTC m=+150.968678697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.535836 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.577815 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.578860 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.592082 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.593417 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.601315 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.605790 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6cnz\" (UniqueName: \"kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz\") pod \"community-operators-shv84\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.605934 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.630163 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.632701 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.637388 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.637743 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.137730022 +0000 UTC m=+151.070817666 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.670625 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741238 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741473 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741498 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741516 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741538 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt54v\" (UniqueName: \"kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.741586 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.741758 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.24174189 +0000 UTC m=+151.174829534 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.757414 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.769925 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.793758 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848494 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848545 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848628 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848647 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848664 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.848682 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt54v\" (UniqueName: \"kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.849304 4953 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.349291188 +0000 UTC m=+151.282378832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.851477 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.851542 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.851663 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.891013 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt54v\" (UniqueName: \"kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v\") pod \"certified-operators-7bs5n\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.901360 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.901787 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.922554 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.965413 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.965688 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfnnm\" (UniqueName: \"kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.965727 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:48:59 crc kubenswrapper[4953]: I1011 02:48:59.965772 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:48:59 crc kubenswrapper[4953]: E1011 02:48:59.966760 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.466717441 +0000 UTC m=+151.399805235 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.068369 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfnnm\" (UniqueName: \"kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.068411 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.068455 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.069223 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.069264 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.069484 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.070032 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.57001643 +0000 UTC m=+151.503104074 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.129203 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfnnm\" (UniqueName: \"kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm\") pod \"community-operators-wwsh8\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.130830 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:00 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:00 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:00 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.130890 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.138017 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.171481 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.172050 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.672030156 +0000 UTC m=+151.605117800 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.219657 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b97c24d0bc277720d5abb8e6dcb6ea4a5c008674ade27d0844513ddeaaa39be7"} Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.234362 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.245095 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ea83edadeb0c086a388473a301ba30aa3031b8e08f764ce1021cf346edc34fb7"} Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.279503 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.279912 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.779895052 +0000 UTC m=+151.712982696 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.284263 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" event={"ID":"cc2a69c8-612d-490d-82db-f7d78cdfa38c","Type":"ContainerStarted","Data":"8256d9817aaa12afff0b057cc498d03e6df73d15a47bf86ed51a78bd1e657137"} Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.284312 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" event={"ID":"cc2a69c8-612d-490d-82db-f7d78cdfa38c","Type":"ContainerStarted","Data":"60f2a963541f099b0921386b397e03d4e6db3f3770716b90030ee8caf3a23fff"} Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.318710 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"719e6b294086b0cc6a14352b5c1d56d32cd8ab4d1c93dbf0bf88bc25b3aa1a46"} Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.319396 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.385050 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.385797 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.885765937 +0000 UTC m=+151.818853571 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.453752 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.489910 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.490812 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:00.990797571 +0000 UTC m=+151.923885215 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.513200 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.591146 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.591656 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.091631426 +0000 UTC m=+152.024719070 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.596207 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.694482 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.694970 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.194954556 +0000 UTC m=+152.128042200 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.714884 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dxblc" Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.727012 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:49:00 crc kubenswrapper[4953]: W1011 02:49:00.748132 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5960496_4c63_4015_ac78_20c65d324cf0.slice/crio-2a10b9eae2c478537992fb6b990a9db9e50476ba5cc6a678114df87d94edcc60 WatchSource:0}: Error finding container 2a10b9eae2c478537992fb6b990a9db9e50476ba5cc6a678114df87d94edcc60: Status 404 returned error can't find the container with id 2a10b9eae2c478537992fb6b990a9db9e50476ba5cc6a678114df87d94edcc60 Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.796513 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.796834 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-11 02:49:01.296798686 +0000 UTC m=+152.229886330 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.797250 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.797823 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.297811602 +0000 UTC m=+152.230899426 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.898371 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.899213 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.399190642 +0000 UTC m=+152.332278286 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:00 crc kubenswrapper[4953]: I1011 02:49:00.899354 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:00 crc kubenswrapper[4953]: E1011 02:49:00.899831 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.399809088 +0000 UTC m=+152.332896922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.000901 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.001136 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.501104585 +0000 UTC m=+152.434192219 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.001242 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.001566 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.501556987 +0000 UTC m=+152.434644631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.098648 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.103152 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.106999 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:01 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:01 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:01 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.107198 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.107218 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.107289 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.110669 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.610638685 +0000 UTC m=+152.543726329 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.112646 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.113227 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.613216771 +0000 UTC m=+152.546304415 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.119976 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.142303 4953 patch_prober.go:28] interesting pod/apiserver-76f77b778f-t5tdt container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]log ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]etcd ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/generic-apiserver-start-informers ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/max-in-flight-filter ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 11 02:49:01 crc kubenswrapper[4953]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/project.openshift.io-projectcache ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-startinformers ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 11 02:49:01 crc kubenswrapper[4953]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 11 02:49:01 crc kubenswrapper[4953]: livez check failed Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.142390 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" podUID="eef84f8c-6025-40be-8289-bd7b4c6e5a55" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.213372 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.213571 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.713547494 +0000 UTC m=+152.646635138 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.213740 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l855v\" (UniqueName: \"kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.213782 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.213813 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.213864 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.214124 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.714116648 +0000 UTC m=+152.647204292 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.245865 4953 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.255801 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-9v76x" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.314695 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.314854 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.314908 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.814873612 +0000 UTC m=+152.747961256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.315082 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l855v\" (UniqueName: \"kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.315177 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.315262 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.315708 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.815683943 +0000 UTC m=+152.748771597 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.315912 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.316276 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.318168 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.322334 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.329245 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerStarted","Data":"2a10b9eae2c478537992fb6b990a9db9e50476ba5cc6a678114df87d94edcc60"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.330751 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerStarted","Data":"199ec72869d0269909b7183fa45099a8c5bda5d27d66b3f2958e8fb81620c945"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.331508 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jh2h9" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.331672 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"740d0184-00b9-40d4-b3ca-b09ef8f99141","Type":"ContainerStarted","Data":"99463745b45aae668d5bcec67fb7450f852b68db3da53da4681308b3bffe5ac0"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.332776 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerStarted","Data":"29d1a0f6df093dfc6e11837418ca18d1566d1d0f7990011a2e31a1bb8f8b58b8"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.332799 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerStarted","Data":"8f4d054ddefeac7f2a65df8a9f2bf017835c2fe490fd5710faae1898885fec4f"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.334437 4953 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerStarted","Data":"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.334460 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerStarted","Data":"7cefb6f4ad22135c990b7818417bca575dc51e95d11ac7f49c7c649035d5ddd2"} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.352821 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l855v\" (UniqueName: \"kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v\") pod \"redhat-marketplace-bmjtp\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.401003 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.415957 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.418590 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:01.918556111 +0000 UTC m=+152.851643755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.419060 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.426360 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.499746 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.499803 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.501835 4953 patch_prober.go:28] interesting pod/console-f9d7485db-cv9v6 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.501922 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cv9v6" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.511131 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.512492 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.519978 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.520017 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.520089 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z2rv\" (UniqueName: \"kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.520124 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.524357 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:02.024315173 +0000 UTC m=+152.957402817 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.534022 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.621061 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.621595 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z2rv\" (UniqueName: \"kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.621787 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:02.121763421 +0000 UTC m=+153.054851065 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.621835 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.621860 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.624105 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.624235 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.644370 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z2rv\" (UniqueName: \"kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv\") pod \"redhat-marketplace-xslwr\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.687034 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:49:01 crc kubenswrapper[4953]: W1011 02:49:01.701849 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda2262ea_b5ad_41fd_b049_8f281a8a23ea.slice/crio-63e94a84e0dad9bfdd9b916ec033470e16179400795ead99b6c80a32a471423b WatchSource:0}: Error finding container 63e94a84e0dad9bfdd9b916ec033470e16179400795ead99b6c80a32a471423b: Status 404 returned error can't find the container with id 63e94a84e0dad9bfdd9b916ec033470e16179400795ead99b6c80a32a471423b Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.727377 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.728911 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.729457 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:02.229421023 +0000 UTC m=+153.162508857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.738694 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.830728 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.831530 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-11 02:49:02.3314899 +0000 UTC m=+153.264577564 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.932407 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:01 crc kubenswrapper[4953]: E1011 02:49:01.934374 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-11 02:49:02.434343327 +0000 UTC m=+153.367430971 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-svdgp" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.967205 4953 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-11T02:49:01.245897686Z","Handler":null,"Name":""} Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.977380 4953 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.977485 4953 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 11 02:49:01 crc kubenswrapper[4953]: I1011 02:49:01.989371 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.033318 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.038589 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 11 02:49:02 crc kubenswrapper[4953]: W1011 02:49:02.065457 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5abe16cb_6674_4029_b0d4_1e2be39b9f8d.slice/crio-60ffd8f38d6250a5939413648f42c4aa561246689fa0e03db4f322e3b7358346 WatchSource:0}: Error finding container 60ffd8f38d6250a5939413648f42c4aa561246689fa0e03db4f322e3b7358346: Status 404 returned error can't find the container with id 60ffd8f38d6250a5939413648f42c4aa561246689fa0e03db4f322e3b7358346 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.095980 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.098493 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.099897 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:02 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:02 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:02 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.099952 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.107808 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-k7h6x" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.134916 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.215304 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.216309 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.219015 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.219263 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.225981 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.230523 4953 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.230574 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.236342 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.236512 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.321360 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-svdgp\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.337641 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.337741 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.337822 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.341899 4953 generic.go:334] "Generic (PLEG): container finished" podID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerID="29d1a0f6df093dfc6e11837418ca18d1566d1d0f7990011a2e31a1bb8f8b58b8" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.342009 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" 
event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerDied","Data":"29d1a0f6df093dfc6e11837418ca18d1566d1d0f7990011a2e31a1bb8f8b58b8"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.344015 4953 generic.go:334] "Generic (PLEG): container finished" podID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerID="025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.344099 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerDied","Data":"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.344272 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.345540 4953 generic.go:334] "Generic (PLEG): container finished" podID="f5960496-4c63-4015-ac78-20c65d324cf0" containerID="58aec2d3e8c0df912a23b942cfaea8570bc568e0f1af67d2c651d9e57b8f7f49" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.345553 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.345693 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerDied","Data":"58aec2d3e8c0df912a23b942cfaea8570bc568e0f1af67d2c651d9e57b8f7f49"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.348341 4953 generic.go:334] "Generic (PLEG): container finished" podID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerID="f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.348395 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerDied","Data":"f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.348418 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerStarted","Data":"63e94a84e0dad9bfdd9b916ec033470e16179400795ead99b6c80a32a471423b"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.353830 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-kmshj" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.355789 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerStarted","Data":"60ffd8f38d6250a5939413648f42c4aa561246689fa0e03db4f322e3b7358346"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.357787 4953 generic.go:334] "Generic (PLEG): container finished" podID="a29cc960-b47b-494a-81bd-617e97aed612" containerID="488ce46fe2029dc1801f904b60647beeb54bf858b61e45f2a08105a49372a591" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.357859 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" 
event={"ID":"a29cc960-b47b-494a-81bd-617e97aed612","Type":"ContainerDied","Data":"488ce46fe2029dc1801f904b60647beeb54bf858b61e45f2a08105a49372a591"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.361377 4953 generic.go:334] "Generic (PLEG): container finished" podID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerID="33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.361461 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerDied","Data":"33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.367678 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" event={"ID":"cc2a69c8-612d-490d-82db-f7d78cdfa38c","Type":"ContainerStarted","Data":"bf6b83dbc7dc00a81128d1bb3d83300c6fb42c3532dff16ef2ac267d465967b3"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.371576 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.372238 4953 generic.go:334] "Generic (PLEG): container finished" podID="740d0184-00b9-40d4-b3ca-b09ef8f99141" containerID="d276ecbda031256dc19ef5512d58c795e0e59964ec0c97d7f32b7ba407086ee9" exitCode=0 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.372367 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"740d0184-00b9-40d4-b3ca-b09ef8f99141","Type":"ContainerDied","Data":"d276ecbda031256dc19ef5512d58c795e0e59964ec0c97d7f32b7ba407086ee9"} Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.445969 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.446044 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.447001 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.447091 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.514501 4953 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-sf6k9"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.518931 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.520955 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.524223 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sf6k9"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.532128 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.577313 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-vlk7d" podStartSLOduration=13.577292897 podStartE2EDuration="13.577292897s" podCreationTimestamp="2025-10-11 02:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:49:02.572499364 +0000 UTC m=+153.505587018" watchObservedRunningTime="2025-10-11 02:49:02.577292897 +0000 UTC m=+153.510380541" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.578895 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.647577 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb7xk\" (UniqueName: \"kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.647650 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.647678 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.749298 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb7xk\" (UniqueName: \"kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.749818 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " 
pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.749856 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.750618 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.750695 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.772542 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb7xk\" (UniqueName: \"kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk\") pod \"redhat-operators-sf6k9\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") " pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.783186 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.835717 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.851448 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:49:02 crc kubenswrapper[4953]: W1011 02:49:02.891381 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56d33bde_ddb0_4a97_8c45_5df1f80cbdbd.slice/crio-d1c34714912d78f30be509a7c31b10da5a4626da36dfcc7b5625ce993d85e956 WatchSource:0}: Error finding container d1c34714912d78f30be509a7c31b10da5a4626da36dfcc7b5625ce993d85e956: Status 404 returned error can't find the container with id d1c34714912d78f30be509a7c31b10da5a4626da36dfcc7b5625ce993d85e956 Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.897964 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.901060 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:02 crc kubenswrapper[4953]: I1011 02:49:02.916469 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.054409 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.054793 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.054848 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x29k\" (UniqueName: \"kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.099333 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:03 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:03 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:03 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.099391 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.143339 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sf6k9"] Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.156347 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x29k\" (UniqueName: \"kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.156406 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.156467 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities\") pod \"redhat-operators-zc8xb\" (UID: 
\"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.157137 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.157575 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.184180 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x29k\" (UniqueName: \"kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k\") pod \"redhat-operators-zc8xb\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.217955 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.418151 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"32e8ba89-a863-49cf-a1c0-9060de6c7cfe","Type":"ContainerStarted","Data":"c1099ae47daaead919eba9bbb153cde301fef96e7126de5e9bfcb598ab11ad95"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.418205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"32e8ba89-a863-49cf-a1c0-9060de6c7cfe","Type":"ContainerStarted","Data":"ab0136b95292378a8aef17db8b74dba5ab4120617e1850517365107bf1fead9c"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.423748 4953 generic.go:334] "Generic (PLEG): container finished" podID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerID="60fa699a6559700b9fb63faf5bd766bfafb5bc333c8365f0daabd3e7b3673113" exitCode=0 Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.424106 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerDied","Data":"60fa699a6559700b9fb63faf5bd766bfafb5bc333c8365f0daabd3e7b3673113"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.432525 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerStarted","Data":"a6f4bce7b6435cf7f47472cf30bc818b656567c630904f5e977ec494146b929b"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.436724 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" event={"ID":"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd","Type":"ContainerStarted","Data":"5ad8a68fc99249acf7196b2d01fba3f01c517a1f4454e9245f48a35fb8de6ea4"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.436796 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" 
event={"ID":"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd","Type":"ContainerStarted","Data":"d1c34714912d78f30be509a7c31b10da5a4626da36dfcc7b5625ce993d85e956"} Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.472989 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" podStartSLOduration=132.472970353 podStartE2EDuration="2m12.472970353s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:49:03.472392588 +0000 UTC m=+154.405480232" watchObservedRunningTime="2025-10-11 02:49:03.472970353 +0000 UTC m=+154.406057997" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.547450 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.705973 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.747676 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.805506 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.871418 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access\") pod \"740d0184-00b9-40d4-b3ca-b09ef8f99141\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.871468 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume\") pod \"a29cc960-b47b-494a-81bd-617e97aed612\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.871519 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir\") pod \"740d0184-00b9-40d4-b3ca-b09ef8f99141\" (UID: \"740d0184-00b9-40d4-b3ca-b09ef8f99141\") " Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.871574 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume\") pod \"a29cc960-b47b-494a-81bd-617e97aed612\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.871669 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdxsh\" (UniqueName: \"kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh\") pod \"a29cc960-b47b-494a-81bd-617e97aed612\" (UID: \"a29cc960-b47b-494a-81bd-617e97aed612\") " Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.872377 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume" (OuterVolumeSpecName: "config-volume") pod "a29cc960-b47b-494a-81bd-617e97aed612" (UID: "a29cc960-b47b-494a-81bd-617e97aed612"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.872724 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "740d0184-00b9-40d4-b3ca-b09ef8f99141" (UID: "740d0184-00b9-40d4-b3ca-b09ef8f99141"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.876797 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "740d0184-00b9-40d4-b3ca-b09ef8f99141" (UID: "740d0184-00b9-40d4-b3ca-b09ef8f99141"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.877861 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a29cc960-b47b-494a-81bd-617e97aed612" (UID: "a29cc960-b47b-494a-81bd-617e97aed612"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.878099 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh" (OuterVolumeSpecName: "kube-api-access-vdxsh") pod "a29cc960-b47b-494a-81bd-617e97aed612" (UID: "a29cc960-b47b-494a-81bd-617e97aed612"). InnerVolumeSpecName "kube-api-access-vdxsh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.972925 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdxsh\" (UniqueName: \"kubernetes.io/projected/a29cc960-b47b-494a-81bd-617e97aed612-kube-api-access-vdxsh\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.972982 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/740d0184-00b9-40d4-b3ca-b09ef8f99141-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.973080 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a29cc960-b47b-494a-81bd-617e97aed612-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.973104 4953 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/740d0184-00b9-40d4-b3ca-b09ef8f99141-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:03 crc kubenswrapper[4953]: I1011 02:49:03.973146 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a29cc960-b47b-494a-81bd-617e97aed612-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.107916 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:04 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:04 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:04 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.108006 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.447268 4953 generic.go:334] "Generic (PLEG): container finished" podID="d491ebed-dc7a-41ac-8258-a21538878605" containerID="aa305cec237c47101d580519897e9bdf30460996b90666f5af2979d79929ba2c" exitCode=0 Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.447319 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerDied","Data":"aa305cec237c47101d580519897e9bdf30460996b90666f5af2979d79929ba2c"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.456071 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"740d0184-00b9-40d4-b3ca-b09ef8f99141","Type":"ContainerDied","Data":"99463745b45aae668d5bcec67fb7450f852b68db3da53da4681308b3bffe5ac0"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.456126 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.456132 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99463745b45aae668d5bcec67fb7450f852b68db3da53da4681308b3bffe5ac0" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.459170 4953 generic.go:334] "Generic (PLEG): container finished" podID="346b5539-0462-42c3-a3c2-0670a7f59285" containerID="d08d2daa6362697d1cf7d27d1dc1ae3c74506dc8a285be25493d9cc1235b413c" exitCode=0 Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.459263 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerDied","Data":"d08d2daa6362697d1cf7d27d1dc1ae3c74506dc8a285be25493d9cc1235b413c"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.459354 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerStarted","Data":"dd56983c18a9a592a528be399df6b611c45b3590146c87a8569b75611ea6cedf"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.461580 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" event={"ID":"a29cc960-b47b-494a-81bd-617e97aed612","Type":"ContainerDied","Data":"d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.461596 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.461620 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3152770db94ce6afa131f02299332f26bb8b744e022839dd90562c3cae96453" Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.464133 4953 generic.go:334] "Generic (PLEG): container finished" podID="32e8ba89-a863-49cf-a1c0-9060de6c7cfe" containerID="c1099ae47daaead919eba9bbb153cde301fef96e7126de5e9bfcb598ab11ad95" exitCode=0 Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.464243 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"32e8ba89-a863-49cf-a1c0-9060de6c7cfe","Type":"ContainerDied","Data":"c1099ae47daaead919eba9bbb153cde301fef96e7126de5e9bfcb598ab11ad95"} Oct 11 02:49:04 crc kubenswrapper[4953]: I1011 02:49:04.464413 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:05 crc kubenswrapper[4953]: I1011 02:49:05.097256 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:05 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:05 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:05 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:05 crc kubenswrapper[4953]: I1011 02:49:05.097477 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe 
failed with statuscode: 500" Oct 11 02:49:06 crc kubenswrapper[4953]: I1011 02:49:06.097763 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:06 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:06 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:06 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:06 crc kubenswrapper[4953]: I1011 02:49:06.098334 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:06 crc kubenswrapper[4953]: I1011 02:49:06.144151 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:49:06 crc kubenswrapper[4953]: I1011 02:49:06.149234 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-t5tdt" Oct 11 02:49:07 crc kubenswrapper[4953]: I1011 02:49:07.100962 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:07 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:07 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:07 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:07 crc kubenswrapper[4953]: I1011 02:49:07.101059 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:07 crc kubenswrapper[4953]: I1011 02:49:07.771936 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-qp84s" Oct 11 02:49:08 crc kubenswrapper[4953]: I1011 02:49:08.097022 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:08 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:08 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:08 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:08 crc kubenswrapper[4953]: I1011 02:49:08.097091 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:09 crc kubenswrapper[4953]: I1011 02:49:09.097142 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:09 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:09 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:09 crc 
kubenswrapper[4953]: healthz check failed Oct 11 02:49:09 crc kubenswrapper[4953]: I1011 02:49:09.097239 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:10 crc kubenswrapper[4953]: I1011 02:49:10.098363 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:10 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:10 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:10 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:10 crc kubenswrapper[4953]: I1011 02:49:10.098832 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.096164 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:11 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:11 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:11 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.096238 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.317939 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.318005 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.498510 4953 patch_prober.go:28] interesting pod/console-f9d7485db-cv9v6 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 11 02:49:11 crc kubenswrapper[4953]: I1011 02:49:11.498618 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cv9v6" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.097454 4953 patch_prober.go:28] interesting 
pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:12 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:12 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:12 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.097563 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.481428 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.486451 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.486524 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:12 crc kubenswrapper[4953]: I1011 02:49:12.481515 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.018850 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.097960 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:13 crc kubenswrapper[4953]: [-]has-synced failed: reason withheld Oct 11 02:49:13 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:13 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.098043 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.150984 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access\") pod \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.151064 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir\") pod \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\" (UID: \"32e8ba89-a863-49cf-a1c0-9060de6c7cfe\") " Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.151305 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "32e8ba89-a863-49cf-a1c0-9060de6c7cfe" (UID: "32e8ba89-a863-49cf-a1c0-9060de6c7cfe"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.151570 4953 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.171671 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "32e8ba89-a863-49cf-a1c0-9060de6c7cfe" (UID: "32e8ba89-a863-49cf-a1c0-9060de6c7cfe"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.253352 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/32e8ba89-a863-49cf-a1c0-9060de6c7cfe-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.558908 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"32e8ba89-a863-49cf-a1c0-9060de6c7cfe","Type":"ContainerDied","Data":"ab0136b95292378a8aef17db8b74dba5ab4120617e1850517365107bf1fead9c"} Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.558969 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab0136b95292378a8aef17db8b74dba5ab4120617e1850517365107bf1fead9c" Oct 11 02:49:13 crc kubenswrapper[4953]: I1011 02:49:13.559015 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 11 02:49:14 crc kubenswrapper[4953]: I1011 02:49:14.097950 4953 patch_prober.go:28] interesting pod/router-default-5444994796-hq74r container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 11 02:49:14 crc kubenswrapper[4953]: [+]has-synced ok Oct 11 02:49:14 crc kubenswrapper[4953]: [+]process-running ok Oct 11 02:49:14 crc kubenswrapper[4953]: healthz check failed Oct 11 02:49:14 crc kubenswrapper[4953]: I1011 02:49:14.098029 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-hq74r" podUID="090fd335-08ca-49b6-beb6-80dc582340a1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 11 02:49:14 crc kubenswrapper[4953]: I1011 02:49:14.268392 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:49:14 crc kubenswrapper[4953]: I1011 02:49:14.272220 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/329460ba-d6c9-4774-b8d3-354e4406575c-metrics-certs\") pod \"network-metrics-daemon-bp9sq\" (UID: \"329460ba-d6c9-4774-b8d3-354e4406575c\") " pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:49:14 crc kubenswrapper[4953]: I1011 02:49:14.413529 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bp9sq" Oct 11 02:49:15 crc kubenswrapper[4953]: I1011 02:49:15.096890 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:49:15 crc kubenswrapper[4953]: I1011 02:49:15.099807 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-hq74r" Oct 11 02:49:21 crc kubenswrapper[4953]: I1011 02:49:21.514618 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:49:21 crc kubenswrapper[4953]: I1011 02:49:21.519181 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.447084 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.447152 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.447208 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.448689 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"31ea7b80a20e4cac173e2dee66e3f473c16088ada35609938e16f94ae6221ef5"} pod="openshift-console/downloads-7954f5f757-7rdbv" containerMessage="Container download-server failed liveness probe, will be restarted" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.448781 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" containerID="cri-o://31ea7b80a20e4cac173e2dee66e3f473c16088ada35609938e16f94ae6221ef5" gracePeriod=2 Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.449621 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.449645 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.453511 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:22 crc 
kubenswrapper[4953]: I1011 02:49:22.453544 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:22 crc kubenswrapper[4953]: I1011 02:49:22.582735 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:49:23 crc kubenswrapper[4953]: I1011 02:49:23.640725 4953 generic.go:334] "Generic (PLEG): container finished" podID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerID="31ea7b80a20e4cac173e2dee66e3f473c16088ada35609938e16f94ae6221ef5" exitCode=0 Oct 11 02:49:23 crc kubenswrapper[4953]: I1011 02:49:23.640837 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7rdbv" event={"ID":"93a1092d-11b9-4b17-bf72-e7296d56dbf3","Type":"ContainerDied","Data":"31ea7b80a20e4cac173e2dee66e3f473c16088ada35609938e16f94ae6221ef5"} Oct 11 02:49:25 crc kubenswrapper[4953]: E1011 02:49:25.281844 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 11 02:49:25 crc kubenswrapper[4953]: E1011 02:49:25.282132 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b2x9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-b6569_openshift-marketplace(ae65cc66-5a1f-4696-9414-1cd71ea4c36b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 02:49:25 crc kubenswrapper[4953]: E1011 02:49:25.283805 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code 
= Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-b6569" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" Oct 11 02:49:27 crc kubenswrapper[4953]: E1011 02:49:27.160084 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-b6569" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" Oct 11 02:49:32 crc kubenswrapper[4953]: I1011 02:49:32.075824 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-b92s9" Oct 11 02:49:32 crc kubenswrapper[4953]: E1011 02:49:32.326680 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 11 02:49:32 crc kubenswrapper[4953]: E1011 02:49:32.327177 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tfnnm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wwsh8_openshift-marketplace(f5960496-4c63-4015-ac78-20c65d324cf0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 02:49:32 crc kubenswrapper[4953]: E1011 02:49:32.328408 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wwsh8" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" Oct 11 02:49:32 crc kubenswrapper[4953]: I1011 02:49:32.446775 4953 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:32 crc kubenswrapper[4953]: I1011 02:49:32.446869 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:38 crc kubenswrapper[4953]: I1011 02:49:38.098968 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 11 02:49:41 crc kubenswrapper[4953]: I1011 02:49:41.316631 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:49:41 crc kubenswrapper[4953]: I1011 02:49:41.316998 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:49:42 crc kubenswrapper[4953]: I1011 02:49:42.446309 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:42 crc kubenswrapper[4953]: I1011 02:49:42.446419 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.794917 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.795538 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l855v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bmjtp_openshift-marketplace(da2262ea-b5ad-41fd-b049-8f281a8a23ea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.796693 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bmjtp" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.811846 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.812005 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8x29k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zc8xb_openshift-marketplace(346b5539-0462-42c3-a3c2-0670a7f59285): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.814041 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-zc8xb" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.854253 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.854355 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mb7xk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-sf6k9_openshift-marketplace(d491ebed-dc7a-41ac-8258-a21538878605): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 11 02:49:45 crc kubenswrapper[4953]: E1011 02:49:45.855824 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-sf6k9" podUID="d491ebed-dc7a-41ac-8258-a21538878605" Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.228170 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-bp9sq"] Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.791264 4953 generic.go:334] "Generic (PLEG): container finished" podID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerID="56b89c41e4e8afbb4d1afb9da0e6e402665a4a5b6349d5c4713aeb3eca07feac" exitCode=0 Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.791324 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerDied","Data":"56b89c41e4e8afbb4d1afb9da0e6e402665a4a5b6349d5c4713aeb3eca07feac"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.795219 4953 generic.go:334] "Generic (PLEG): container finished" podID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerID="901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193" exitCode=0 Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.796027 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerDied","Data":"901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.799309 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" 
event={"ID":"329460ba-d6c9-4774-b8d3-354e4406575c","Type":"ContainerStarted","Data":"a907da4a459a3b32731d836bddfa717453c87859f47f1dd7b733815329489563"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.799538 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" event={"ID":"329460ba-d6c9-4774-b8d3-354e4406575c","Type":"ContainerStarted","Data":"cd001e009903878a44f4b7421c2d735a8ca7051f530fd5cec9fbfacff8dcf882"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.799551 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bp9sq" event={"ID":"329460ba-d6c9-4774-b8d3-354e4406575c","Type":"ContainerStarted","Data":"7e1873a66c5645356a9ffeef4c6c5b47e700a8509f8bed5a1e9b59f508b834b2"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.800816 4953 generic.go:334] "Generic (PLEG): container finished" podID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerID="7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5" exitCode=0 Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.801138 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerDied","Data":"7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.805094 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7rdbv" event={"ID":"93a1092d-11b9-4b17-bf72-e7296d56dbf3","Type":"ContainerStarted","Data":"ec3d67a64dd238587c9f2e76366cb4bea2f9ff2871f4d7a7edccb9c042793630"} Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.805382 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.805824 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.805886 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.810888 4953 generic.go:334] "Generic (PLEG): container finished" podID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerID="6ef0f2b6b41707140348a18411fa9cd7b48fa1bcbbc7eb707c8c4007a24edccc" exitCode=0 Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.811571 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerDied","Data":"6ef0f2b6b41707140348a18411fa9cd7b48fa1bcbbc7eb707c8c4007a24edccc"} Oct 11 02:49:46 crc kubenswrapper[4953]: E1011 02:49:46.823297 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-sf6k9" podUID="d491ebed-dc7a-41ac-8258-a21538878605" Oct 11 02:49:46 
crc kubenswrapper[4953]: E1011 02:49:46.823723 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-zc8xb" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" Oct 11 02:49:46 crc kubenswrapper[4953]: E1011 02:49:46.830382 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-bmjtp" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" Oct 11 02:49:46 crc kubenswrapper[4953]: I1011 02:49:46.879405 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-bp9sq" podStartSLOduration=175.879382559 podStartE2EDuration="2m55.879382559s" podCreationTimestamp="2025-10-11 02:46:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:49:46.868508699 +0000 UTC m=+197.801596363" watchObservedRunningTime="2025-10-11 02:49:46.879382559 +0000 UTC m=+197.812470203" Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.818093 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerStarted","Data":"b8dddd432718115aef2ccbfe5a65e65944fe997fd2af79a18f4a321809e96a54"} Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.821733 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerStarted","Data":"c29d336c7b528c421baf7c4e7321e4e77b031ef3829ee21ad9536f1c6b62d3a5"} Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.826379 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerStarted","Data":"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2"} Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.829350 4953 generic.go:334] "Generic (PLEG): container finished" podID="f5960496-4c63-4015-ac78-20c65d324cf0" containerID="7f37397d23070a7611ba763654079998276e0f6c9e55d1bdd91eb58eccc6aba1" exitCode=0 Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.829425 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerDied","Data":"7f37397d23070a7611ba763654079998276e0f6c9e55d1bdd91eb58eccc6aba1"} Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.835698 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerStarted","Data":"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6"} Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.836369 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 
02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.836436 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.841901 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xslwr" podStartSLOduration=2.8757413979999997 podStartE2EDuration="46.841883062s" podCreationTimestamp="2025-10-11 02:49:01 +0000 UTC" firstStartedPulling="2025-10-11 02:49:03.425393518 +0000 UTC m=+154.358481162" lastFinishedPulling="2025-10-11 02:49:47.391535162 +0000 UTC m=+198.324622826" observedRunningTime="2025-10-11 02:49:47.840986969 +0000 UTC m=+198.774074613" watchObservedRunningTime="2025-10-11 02:49:47.841883062 +0000 UTC m=+198.774970706" Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.856731 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b6569" podStartSLOduration=3.703990179 podStartE2EDuration="48.856706164s" podCreationTimestamp="2025-10-11 02:48:59 +0000 UTC" firstStartedPulling="2025-10-11 02:49:02.348007995 +0000 UTC m=+153.281095639" lastFinishedPulling="2025-10-11 02:49:47.50072398 +0000 UTC m=+198.433811624" observedRunningTime="2025-10-11 02:49:47.856486279 +0000 UTC m=+198.789573933" watchObservedRunningTime="2025-10-11 02:49:47.856706164 +0000 UTC m=+198.789793808" Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.877205 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7bs5n" podStartSLOduration=3.691713184 podStartE2EDuration="48.877184433s" podCreationTimestamp="2025-10-11 02:48:59 +0000 UTC" firstStartedPulling="2025-10-11 02:49:02.343789317 +0000 UTC m=+153.276876981" lastFinishedPulling="2025-10-11 02:49:47.529260596 +0000 UTC m=+198.462348230" observedRunningTime="2025-10-11 02:49:47.872385719 +0000 UTC m=+198.805473383" watchObservedRunningTime="2025-10-11 02:49:47.877184433 +0000 UTC m=+198.810272087" Oct 11 02:49:47 crc kubenswrapper[4953]: I1011 02:49:47.890071 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-shv84" podStartSLOduration=3.800081468 podStartE2EDuration="48.890042564s" podCreationTimestamp="2025-10-11 02:48:59 +0000 UTC" firstStartedPulling="2025-10-11 02:49:02.363975906 +0000 UTC m=+153.297063570" lastFinishedPulling="2025-10-11 02:49:47.453937022 +0000 UTC m=+198.387024666" observedRunningTime="2025-10-11 02:49:47.886241656 +0000 UTC m=+198.819329310" watchObservedRunningTime="2025-10-11 02:49:47.890042564 +0000 UTC m=+198.823130208" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.433045 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.433100 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.671292 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.671369 4953 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.902525 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:49:49 crc kubenswrapper[4953]: I1011 02:49:49.902592 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:49:50 crc kubenswrapper[4953]: I1011 02:49:50.039898 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:49:50 crc kubenswrapper[4953]: I1011 02:49:50.040331 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:49:50 crc kubenswrapper[4953]: I1011 02:49:50.041760 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:49:51 crc kubenswrapper[4953]: I1011 02:49:51.740241 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:51 crc kubenswrapper[4953]: I1011 02:49:51.740664 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:51 crc kubenswrapper[4953]: I1011 02:49:51.789660 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:49:52 crc kubenswrapper[4953]: I1011 02:49:52.446769 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:52 crc kubenswrapper[4953]: I1011 02:49:52.446823 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:52 crc kubenswrapper[4953]: I1011 02:49:52.446827 4953 patch_prober.go:28] interesting pod/downloads-7954f5f757-7rdbv container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" start-of-body= Oct 11 02:49:52 crc kubenswrapper[4953]: I1011 02:49:52.446893 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7rdbv" podUID="93a1092d-11b9-4b17-bf72-e7296d56dbf3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.43:8080/\": dial tcp 10.217.0.43:8080: connect: connection refused" Oct 11 02:49:53 crc kubenswrapper[4953]: I1011 02:49:53.869320 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerStarted","Data":"003ebb8eab8a2ddb9caebefc737eee167a657a045ce95a60ab6baae623984f7b"} Oct 11 02:49:58 crc kubenswrapper[4953]: I1011 02:49:58.829534 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wwsh8" 
podStartSLOduration=9.203003027 podStartE2EDuration="59.829501079s" podCreationTimestamp="2025-10-11 02:48:59 +0000 UTC" firstStartedPulling="2025-10-11 02:49:02.348360844 +0000 UTC m=+153.281448508" lastFinishedPulling="2025-10-11 02:49:52.974858886 +0000 UTC m=+203.907946560" observedRunningTime="2025-10-11 02:49:54.898598669 +0000 UTC m=+205.831686353" watchObservedRunningTime="2025-10-11 02:49:58.829501079 +0000 UTC m=+209.762588763" Oct 11 02:49:59 crc kubenswrapper[4953]: I1011 02:49:59.483181 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:49:59 crc kubenswrapper[4953]: I1011 02:49:59.719143 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:49:59 crc kubenswrapper[4953]: I1011 02:49:59.958148 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:50:00 crc kubenswrapper[4953]: I1011 02:50:00.138737 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:00 crc kubenswrapper[4953]: I1011 02:50:00.138801 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:00 crc kubenswrapper[4953]: I1011 02:50:00.179729 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:00 crc kubenswrapper[4953]: I1011 02:50:00.968432 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:01 crc kubenswrapper[4953]: I1011 02:50:01.807549 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:50:02 crc kubenswrapper[4953]: I1011 02:50:02.029402 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:50:02 crc kubenswrapper[4953]: I1011 02:50:02.470288 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7rdbv" Oct 11 02:50:02 crc kubenswrapper[4953]: I1011 02:50:02.924849 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wwsh8" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="registry-server" containerID="cri-o://003ebb8eab8a2ddb9caebefc737eee167a657a045ce95a60ab6baae623984f7b" gracePeriod=2 Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.229030 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.229576 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7bs5n" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="registry-server" containerID="cri-o://c29d336c7b528c421baf7c4e7321e4e77b031ef3829ee21ad9536f1c6b62d3a5" gracePeriod=2 Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.428582 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.428881 4953 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-xslwr" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="registry-server" containerID="cri-o://b8dddd432718115aef2ccbfe5a65e65944fe997fd2af79a18f4a321809e96a54" gracePeriod=2 Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.945853 4953 generic.go:334] "Generic (PLEG): container finished" podID="f5960496-4c63-4015-ac78-20c65d324cf0" containerID="003ebb8eab8a2ddb9caebefc737eee167a657a045ce95a60ab6baae623984f7b" exitCode=0 Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.945940 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerDied","Data":"003ebb8eab8a2ddb9caebefc737eee167a657a045ce95a60ab6baae623984f7b"} Oct 11 02:50:04 crc kubenswrapper[4953]: I1011 02:50:04.948483 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerStarted","Data":"8ad5ea18a7c37cb43dfada2857e4b93650c1feb48cf421db45956be88987420a"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.553019 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.693116 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities\") pod \"f5960496-4c63-4015-ac78-20c65d324cf0\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.693185 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfnnm\" (UniqueName: \"kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm\") pod \"f5960496-4c63-4015-ac78-20c65d324cf0\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.693224 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content\") pod \"f5960496-4c63-4015-ac78-20c65d324cf0\" (UID: \"f5960496-4c63-4015-ac78-20c65d324cf0\") " Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.694234 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities" (OuterVolumeSpecName: "utilities") pod "f5960496-4c63-4015-ac78-20c65d324cf0" (UID: "f5960496-4c63-4015-ac78-20c65d324cf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.702493 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm" (OuterVolumeSpecName: "kube-api-access-tfnnm") pod "f5960496-4c63-4015-ac78-20c65d324cf0" (UID: "f5960496-4c63-4015-ac78-20c65d324cf0"). InnerVolumeSpecName "kube-api-access-tfnnm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.761950 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5960496-4c63-4015-ac78-20c65d324cf0" (UID: "f5960496-4c63-4015-ac78-20c65d324cf0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.794737 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfnnm\" (UniqueName: \"kubernetes.io/projected/f5960496-4c63-4015-ac78-20c65d324cf0-kube-api-access-tfnnm\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.794779 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.794788 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5960496-4c63-4015-ac78-20c65d324cf0-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.956474 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerStarted","Data":"5cb88f07d3f50c95973c5ec61ca9ac282c5690a10d2119f18e406dc496dd0222"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.959375 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wwsh8" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.959382 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wwsh8" event={"ID":"f5960496-4c63-4015-ac78-20c65d324cf0","Type":"ContainerDied","Data":"2a10b9eae2c478537992fb6b990a9db9e50476ba5cc6a678114df87d94edcc60"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.959957 4953 scope.go:117] "RemoveContainer" containerID="003ebb8eab8a2ddb9caebefc737eee167a657a045ce95a60ab6baae623984f7b" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.961941 4953 generic.go:334] "Generic (PLEG): container finished" podID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerID="b8dddd432718115aef2ccbfe5a65e65944fe997fd2af79a18f4a321809e96a54" exitCode=0 Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.962022 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerDied","Data":"b8dddd432718115aef2ccbfe5a65e65944fe997fd2af79a18f4a321809e96a54"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.965029 4953 generic.go:334] "Generic (PLEG): container finished" podID="d491ebed-dc7a-41ac-8258-a21538878605" containerID="8ad5ea18a7c37cb43dfada2857e4b93650c1feb48cf421db45956be88987420a" exitCode=0 Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.965731 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerDied","Data":"8ad5ea18a7c37cb43dfada2857e4b93650c1feb48cf421db45956be88987420a"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.968244 4953 
generic.go:334] "Generic (PLEG): container finished" podID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerID="c29d336c7b528c421baf7c4e7321e4e77b031ef3829ee21ad9536f1c6b62d3a5" exitCode=0 Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.968299 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerDied","Data":"c29d336c7b528c421baf7c4e7321e4e77b031ef3829ee21ad9536f1c6b62d3a5"} Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.978842 4953 scope.go:117] "RemoveContainer" containerID="7f37397d23070a7611ba763654079998276e0f6c9e55d1bdd91eb58eccc6aba1" Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.984302 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:50:05 crc kubenswrapper[4953]: I1011 02:50:05.988032 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wwsh8"] Oct 11 02:50:06 crc kubenswrapper[4953]: I1011 02:50:06.000945 4953 scope.go:117] "RemoveContainer" containerID="58aec2d3e8c0df912a23b942cfaea8570bc568e0f1af67d2c651d9e57b8f7f49" Oct 11 02:50:06 crc kubenswrapper[4953]: I1011 02:50:06.981787 4953 generic.go:334] "Generic (PLEG): container finished" podID="346b5539-0462-42c3-a3c2-0670a7f59285" containerID="5cb88f07d3f50c95973c5ec61ca9ac282c5690a10d2119f18e406dc496dd0222" exitCode=0 Oct 11 02:50:06 crc kubenswrapper[4953]: I1011 02:50:06.981956 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerDied","Data":"5cb88f07d3f50c95973c5ec61ca9ac282c5690a10d2119f18e406dc496dd0222"} Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.636375 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.743312 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.808509 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" path="/var/lib/kubelet/pods/f5960496-4c63-4015-ac78-20c65d324cf0/volumes" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.824501 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content\") pod \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.824621 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities\") pod \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.824747 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z2rv\" (UniqueName: \"kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv\") pod \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\" (UID: \"5abe16cb-6674-4029-b0d4-1e2be39b9f8d\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.825909 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities" (OuterVolumeSpecName: "utilities") pod "5abe16cb-6674-4029-b0d4-1e2be39b9f8d" (UID: "5abe16cb-6674-4029-b0d4-1e2be39b9f8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.831866 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv" (OuterVolumeSpecName: "kube-api-access-9z2rv") pod "5abe16cb-6674-4029-b0d4-1e2be39b9f8d" (UID: "5abe16cb-6674-4029-b0d4-1e2be39b9f8d"). InnerVolumeSpecName "kube-api-access-9z2rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.838173 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5abe16cb-6674-4029-b0d4-1e2be39b9f8d" (UID: "5abe16cb-6674-4029-b0d4-1e2be39b9f8d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.925900 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content\") pod \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.926023 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt54v\" (UniqueName: \"kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v\") pod \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.926086 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities\") pod \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\" (UID: \"f56d79f7-70e8-42d5-b5ef-d499328f80f2\") " Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.926388 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z2rv\" (UniqueName: \"kubernetes.io/projected/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-kube-api-access-9z2rv\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.926403 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.926414 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5abe16cb-6674-4029-b0d4-1e2be39b9f8d-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.927503 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities" (OuterVolumeSpecName: "utilities") pod "f56d79f7-70e8-42d5-b5ef-d499328f80f2" (UID: "f56d79f7-70e8-42d5-b5ef-d499328f80f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.932417 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v" (OuterVolumeSpecName: "kube-api-access-zt54v") pod "f56d79f7-70e8-42d5-b5ef-d499328f80f2" (UID: "f56d79f7-70e8-42d5-b5ef-d499328f80f2"). InnerVolumeSpecName "kube-api-access-zt54v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.983836 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f56d79f7-70e8-42d5-b5ef-d499328f80f2" (UID: "f56d79f7-70e8-42d5-b5ef-d499328f80f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.991318 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bs5n" event={"ID":"f56d79f7-70e8-42d5-b5ef-d499328f80f2","Type":"ContainerDied","Data":"8f4d054ddefeac7f2a65df8a9f2bf017835c2fe490fd5710faae1898885fec4f"} Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.991372 4953 scope.go:117] "RemoveContainer" containerID="c29d336c7b528c421baf7c4e7321e4e77b031ef3829ee21ad9536f1c6b62d3a5" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.991378 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7bs5n" Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.995704 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xslwr" event={"ID":"5abe16cb-6674-4029-b0d4-1e2be39b9f8d","Type":"ContainerDied","Data":"60ffd8f38d6250a5939413648f42c4aa561246689fa0e03db4f322e3b7358346"} Oct 11 02:50:07 crc kubenswrapper[4953]: I1011 02:50:07.995767 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xslwr" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.025317 4953 scope.go:117] "RemoveContainer" containerID="56b89c41e4e8afbb4d1afb9da0e6e402665a4a5b6349d5c4713aeb3eca07feac" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.029967 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt54v\" (UniqueName: \"kubernetes.io/projected/f56d79f7-70e8-42d5-b5ef-d499328f80f2-kube-api-access-zt54v\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.030023 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.030046 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f56d79f7-70e8-42d5-b5ef-d499328f80f2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.042305 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.047907 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7bs5n"] Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.056697 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.059132 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xslwr"] Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.077800 4953 scope.go:117] "RemoveContainer" containerID="29d1a0f6df093dfc6e11837418ca18d1566d1d0f7990011a2e31a1bb8f8b58b8" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.095094 4953 scope.go:117] "RemoveContainer" containerID="b8dddd432718115aef2ccbfe5a65e65944fe997fd2af79a18f4a321809e96a54" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.112393 4953 scope.go:117] "RemoveContainer" containerID="6ef0f2b6b41707140348a18411fa9cd7b48fa1bcbbc7eb707c8c4007a24edccc" Oct 11 02:50:08 crc kubenswrapper[4953]: I1011 02:50:08.133689 
4953 scope.go:117] "RemoveContainer" containerID="60fa699a6559700b9fb63faf5bd766bfafb5bc333c8365f0daabd3e7b3673113" Oct 11 02:50:09 crc kubenswrapper[4953]: I1011 02:50:09.808702 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" path="/var/lib/kubelet/pods/5abe16cb-6674-4029-b0d4-1e2be39b9f8d/volumes" Oct 11 02:50:09 crc kubenswrapper[4953]: I1011 02:50:09.811148 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" path="/var/lib/kubelet/pods/f56d79f7-70e8-42d5-b5ef-d499328f80f2/volumes" Oct 11 02:50:11 crc kubenswrapper[4953]: I1011 02:50:11.316876 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:50:11 crc kubenswrapper[4953]: I1011 02:50:11.316974 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:50:11 crc kubenswrapper[4953]: I1011 02:50:11.317041 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:50:11 crc kubenswrapper[4953]: I1011 02:50:11.317881 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 02:50:11 crc kubenswrapper[4953]: I1011 02:50:11.317953 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8" gracePeriod=600 Oct 11 02:50:14 crc kubenswrapper[4953]: I1011 02:50:14.054832 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8" exitCode=0 Oct 11 02:50:14 crc kubenswrapper[4953]: I1011 02:50:14.055027 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8"} Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.081457 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerStarted","Data":"51e2c3204aa38b49fe7b230182013ac9821ea3160a6fabe61c6d3e6d2690a608"} Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.087940 4953 generic.go:334] "Generic (PLEG): container finished" podID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerID="0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426" 
exitCode=0 Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.088121 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerDied","Data":"0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426"} Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.096472 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerStarted","Data":"37a0fcc008df9040039af157a49a5dc337bd5cdb34038b7dfb8e24d552010ebf"} Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.102994 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665"} Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.110489 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zc8xb" podStartSLOduration=3.438450423 podStartE2EDuration="1m15.110465489s" podCreationTimestamp="2025-10-11 02:49:02 +0000 UTC" firstStartedPulling="2025-10-11 02:49:04.462114573 +0000 UTC m=+155.395202217" lastFinishedPulling="2025-10-11 02:50:16.134129639 +0000 UTC m=+227.067217283" observedRunningTime="2025-10-11 02:50:17.109307789 +0000 UTC m=+228.042395433" watchObservedRunningTime="2025-10-11 02:50:17.110465489 +0000 UTC m=+228.043553133" Oct 11 02:50:17 crc kubenswrapper[4953]: I1011 02:50:17.131302 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sf6k9" podStartSLOduration=2.98986868 podStartE2EDuration="1m15.131285576s" podCreationTimestamp="2025-10-11 02:49:02 +0000 UTC" firstStartedPulling="2025-10-11 02:49:04.450415202 +0000 UTC m=+155.383502846" lastFinishedPulling="2025-10-11 02:50:16.591832058 +0000 UTC m=+227.524919742" observedRunningTime="2025-10-11 02:50:17.128587167 +0000 UTC m=+228.061674841" watchObservedRunningTime="2025-10-11 02:50:17.131285576 +0000 UTC m=+228.064373220" Oct 11 02:50:18 crc kubenswrapper[4953]: I1011 02:50:18.112065 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerStarted","Data":"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3"} Oct 11 02:50:18 crc kubenswrapper[4953]: I1011 02:50:18.146818 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bmjtp" podStartSLOduration=1.664933872 podStartE2EDuration="1m17.146791687s" podCreationTimestamp="2025-10-11 02:49:01 +0000 UTC" firstStartedPulling="2025-10-11 02:49:02.352773188 +0000 UTC m=+153.285860852" lastFinishedPulling="2025-10-11 02:50:17.834631023 +0000 UTC m=+228.767718667" observedRunningTime="2025-10-11 02:50:18.142793264 +0000 UTC m=+229.075880948" watchObservedRunningTime="2025-10-11 02:50:18.146791687 +0000 UTC m=+229.079879361" Oct 11 02:50:21 crc kubenswrapper[4953]: I1011 02:50:21.162177 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"] Oct 11 02:50:21 crc kubenswrapper[4953]: I1011 02:50:21.427099 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:21 crc kubenswrapper[4953]: I1011 02:50:21.427403 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:21 crc kubenswrapper[4953]: I1011 02:50:21.471626 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:22 crc kubenswrapper[4953]: I1011 02:50:22.183814 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:22 crc kubenswrapper[4953]: I1011 02:50:22.836501 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:50:22 crc kubenswrapper[4953]: I1011 02:50:22.836572 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:50:22 crc kubenswrapper[4953]: I1011 02:50:22.889433 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:50:23 crc kubenswrapper[4953]: I1011 02:50:23.219073 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:23 crc kubenswrapper[4953]: I1011 02:50:23.221044 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:23 crc kubenswrapper[4953]: I1011 02:50:23.237767 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:50:23 crc kubenswrapper[4953]: I1011 02:50:23.262228 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:24 crc kubenswrapper[4953]: I1011 02:50:24.224399 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:24 crc kubenswrapper[4953]: I1011 02:50:24.626734 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:50:26 crc kubenswrapper[4953]: I1011 02:50:26.177653 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zc8xb" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="registry-server" containerID="cri-o://51e2c3204aa38b49fe7b230182013ac9821ea3160a6fabe61c6d3e6d2690a608" gracePeriod=2 Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.193746 4953 generic.go:334] "Generic (PLEG): container finished" podID="346b5539-0462-42c3-a3c2-0670a7f59285" containerID="51e2c3204aa38b49fe7b230182013ac9821ea3160a6fabe61c6d3e6d2690a608" exitCode=0 Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.193855 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerDied","Data":"51e2c3204aa38b49fe7b230182013ac9821ea3160a6fabe61c6d3e6d2690a608"} Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.363942 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.505299 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x29k\" (UniqueName: \"kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k\") pod \"346b5539-0462-42c3-a3c2-0670a7f59285\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.505399 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities\") pod \"346b5539-0462-42c3-a3c2-0670a7f59285\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.505519 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content\") pod \"346b5539-0462-42c3-a3c2-0670a7f59285\" (UID: \"346b5539-0462-42c3-a3c2-0670a7f59285\") " Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.506215 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities" (OuterVolumeSpecName: "utilities") pod "346b5539-0462-42c3-a3c2-0670a7f59285" (UID: "346b5539-0462-42c3-a3c2-0670a7f59285"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.509900 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k" (OuterVolumeSpecName: "kube-api-access-8x29k") pod "346b5539-0462-42c3-a3c2-0670a7f59285" (UID: "346b5539-0462-42c3-a3c2-0670a7f59285"). InnerVolumeSpecName "kube-api-access-8x29k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.582303 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "346b5539-0462-42c3-a3c2-0670a7f59285" (UID: "346b5539-0462-42c3-a3c2-0670a7f59285"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.606821 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.606871 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/346b5539-0462-42c3-a3c2-0670a7f59285-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:27 crc kubenswrapper[4953]: I1011 02:50:27.606889 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x29k\" (UniqueName: \"kubernetes.io/projected/346b5539-0462-42c3-a3c2-0670a7f59285-kube-api-access-8x29k\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.201263 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zc8xb" event={"ID":"346b5539-0462-42c3-a3c2-0670a7f59285","Type":"ContainerDied","Data":"dd56983c18a9a592a528be399df6b611c45b3590146c87a8569b75611ea6cedf"} Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.202162 4953 scope.go:117] "RemoveContainer" containerID="51e2c3204aa38b49fe7b230182013ac9821ea3160a6fabe61c6d3e6d2690a608" Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.201319 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zc8xb" Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.218859 4953 scope.go:117] "RemoveContainer" containerID="5cb88f07d3f50c95973c5ec61ca9ac282c5690a10d2119f18e406dc496dd0222" Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.219591 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.223255 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zc8xb"] Oct 11 02:50:28 crc kubenswrapper[4953]: I1011 02:50:28.230852 4953 scope.go:117] "RemoveContainer" containerID="d08d2daa6362697d1cf7d27d1dc1ae3c74506dc8a285be25493d9cc1235b413c" Oct 11 02:50:29 crc kubenswrapper[4953]: I1011 02:50:29.802084 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" path="/var/lib/kubelet/pods/346b5539-0462-42c3-a3c2-0670a7f59285/volumes" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.191222 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" containerID="cri-o://25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab" gracePeriod=15 Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.657318 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709342 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5f7896898-76fnw"] Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709689 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e8ba89-a863-49cf-a1c0-9060de6c7cfe" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709728 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e8ba89-a863-49cf-a1c0-9060de6c7cfe" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709754 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709771 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709789 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709806 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709840 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709857 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709882 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709897 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709921 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709936 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.709949 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.709961 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="extract-utilities" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710006 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710019 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710034 4953 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="a29cc960-b47b-494a-81bd-617e97aed612" containerName="collect-profiles" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710046 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29cc960-b47b-494a-81bd-617e97aed612" containerName="collect-profiles" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710063 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710075 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710094 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710106 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710123 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710135 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710148 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710160 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710177 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710191 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710210 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710221 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="extract-content" Oct 11 02:50:46 crc kubenswrapper[4953]: E1011 02:50:46.710237 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="740d0184-00b9-40d4-b3ca-b09ef8f99141" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710250 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="740d0184-00b9-40d4-b3ca-b09ef8f99141" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710408 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="5abe16cb-6674-4029-b0d4-1e2be39b9f8d" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710428 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="346b5539-0462-42c3-a3c2-0670a7f59285" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710450 4953 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="a29cc960-b47b-494a-81bd-617e97aed612" containerName="collect-profiles" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710468 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e8ba89-a863-49cf-a1c0-9060de6c7cfe" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710486 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerName="oauth-openshift" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710504 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5960496-4c63-4015-ac78-20c65d324cf0" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710522 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f56d79f7-70e8-42d5-b5ef-d499328f80f2" containerName="registry-server" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.710535 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="740d0184-00b9-40d4-b3ca-b09ef8f99141" containerName="pruner" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.711121 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.729046 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5f7896898-76fnw"] Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796174 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796222 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796269 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796329 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xlkl\" (UniqueName: \"kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796359 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796379 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796472 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796495 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796523 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796547 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796570 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796596 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796636 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.796664 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template\") pod \"e05c4996-7333-41b0-b58d-8471886c9e2a\" (UID: \"e05c4996-7333-41b0-b58d-8471886c9e2a\") " Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.797075 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca" (OuterVolumeSpecName: 
"v4-0-config-system-service-ca") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.797164 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.797179 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.797346 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.797378 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.802097 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.802626 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.802776 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.802879 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl" (OuterVolumeSpecName: "kube-api-access-5xlkl") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "kube-api-access-5xlkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.803515 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.803864 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.804143 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.804231 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.812155 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e05c4996-7333-41b0-b58d-8471886c9e2a" (UID: "e05c4996-7333-41b0-b58d-8471886c9e2a"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898078 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-error\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898131 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-session\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898168 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-dir\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898216 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898276 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898305 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-service-ca\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898322 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898341 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlw8g\" (UniqueName: \"kubernetes.io/projected/7561d1ad-fed6-4bd7-8c57-986132743f62-kube-api-access-zlw8g\") pod 
\"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898371 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-router-certs\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898399 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898422 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898449 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-policies\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898472 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898492 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-login\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898539 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898550 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc 
kubenswrapper[4953]: I1011 02:50:46.898560 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xlkl\" (UniqueName: \"kubernetes.io/projected/e05c4996-7333-41b0-b58d-8471886c9e2a-kube-api-access-5xlkl\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898570 4953 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898578 4953 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e05c4996-7333-41b0-b58d-8471886c9e2a-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898587 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898596 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898625 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898634 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898644 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898653 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898665 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898674 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.898684 4953 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e05c4996-7333-41b0-b58d-8471886c9e2a-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:46 crc kubenswrapper[4953]: 
I1011 02:50:46.999480 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-policies\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999561 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999688 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-login\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999801 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-error\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999858 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-session\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999901 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-dir\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:46 crc kubenswrapper[4953]: I1011 02:50:46.999941 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:46.999989 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000037 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-service-ca\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000071 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000105 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlw8g\" (UniqueName: \"kubernetes.io/projected/7561d1ad-fed6-4bd7-8c57-986132743f62-kube-api-access-zlw8g\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000147 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-router-certs\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000194 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000240 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000291 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-dir\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.000508 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-audit-policies\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.001207 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-service-ca\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.001719 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.001731 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.004548 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.005466 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.005709 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-login\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.005731 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-template-error\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.006501 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.006884 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-session\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.007131 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-router-certs\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.007175 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7561d1ad-fed6-4bd7-8c57-986132743f62-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.032334 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlw8g\" (UniqueName: \"kubernetes.io/projected/7561d1ad-fed6-4bd7-8c57-986132743f62-kube-api-access-zlw8g\") pod \"oauth-openshift-5f7896898-76fnw\" (UID: \"7561d1ad-fed6-4bd7-8c57-986132743f62\") " pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.325264 4953 generic.go:334] "Generic (PLEG): container finished" podID="e05c4996-7333-41b0-b58d-8471886c9e2a" containerID="25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab" exitCode=0 Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.325315 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" event={"ID":"e05c4996-7333-41b0-b58d-8471886c9e2a","Type":"ContainerDied","Data":"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab"} Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.325346 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" event={"ID":"e05c4996-7333-41b0-b58d-8471886c9e2a","Type":"ContainerDied","Data":"06acd21c33d5a8a6defed7b98e44d3c60c2f79ceea2241002b21f2711a8371bf"} Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.325376 4953 scope.go:117] "RemoveContainer" containerID="25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.325498 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5p48" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.329527 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.390447 4953 scope.go:117] "RemoveContainer" containerID="25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab" Oct 11 02:50:47 crc kubenswrapper[4953]: E1011 02:50:47.390855 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab\": container with ID starting with 25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab not found: ID does not exist" containerID="25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.390884 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab"} err="failed to get container status \"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab\": rpc error: code = NotFound desc = could not find container \"25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab\": container with ID starting with 25e5a6f81e8c1ccbf4f45168db4a661a0c84467d320d391e94b1938479c1c6ab not found: ID does not exist" Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.400723 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"] Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.403234 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5p48"] Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.714788 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5f7896898-76fnw"] Oct 11 02:50:47 crc kubenswrapper[4953]: I1011 02:50:47.808925 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e05c4996-7333-41b0-b58d-8471886c9e2a" path="/var/lib/kubelet/pods/e05c4996-7333-41b0-b58d-8471886c9e2a/volumes" Oct 11 02:50:48 crc kubenswrapper[4953]: I1011 02:50:48.332157 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" event={"ID":"7561d1ad-fed6-4bd7-8c57-986132743f62","Type":"ContainerStarted","Data":"d617e1468a9530f041fabb16bf7cbbc4530b8be3cc873f945aa32f509685e993"} Oct 11 02:50:48 crc kubenswrapper[4953]: I1011 02:50:48.332217 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" event={"ID":"7561d1ad-fed6-4bd7-8c57-986132743f62","Type":"ContainerStarted","Data":"81f6c80ba022efbcdb2decb90c50ea73e7c574a836705c40a533b57190b6280f"} Oct 11 02:50:48 crc kubenswrapper[4953]: I1011 02:50:48.332352 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:48 crc kubenswrapper[4953]: I1011 02:50:48.356575 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" podStartSLOduration=27.356554196 podStartE2EDuration="27.356554196s" podCreationTimestamp="2025-10-11 02:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:50:48.355808637 +0000 UTC m=+259.288896291" watchObservedRunningTime="2025-10-11 02:50:48.356554196 +0000 UTC 
m=+259.289641850" Oct 11 02:50:48 crc kubenswrapper[4953]: I1011 02:50:48.580748 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5f7896898-76fnw" Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.894511 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.895453 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b6569" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="registry-server" containerID="cri-o://392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2" gracePeriod=30 Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.914854 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.915235 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-shv84" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="registry-server" containerID="cri-o://3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6" gracePeriod=30 Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.918529 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.918737 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator" containerID="cri-o://7d99427daf2463cc3958d6288931f97a268a9df6e4d5d01084dd6b265b4216bb" gracePeriod=30 Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.931790 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.931996 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bmjtp" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="registry-server" containerID="cri-o://52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3" gracePeriod=30 Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.943826 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8tvwc"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.944917 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.984416 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sf6k9"] Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.984714 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sf6k9" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="registry-server" containerID="cri-o://37a0fcc008df9040039af157a49a5dc337bd5cdb34038b7dfb8e24d552010ebf" gracePeriod=30 Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.985498 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.985529 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.985559 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhchs\" (UniqueName: \"kubernetes.io/projected/8dfc94ce-eb5a-4891-873a-3b44b9233e55-kube-api-access-mhchs\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:58 crc kubenswrapper[4953]: I1011 02:50:58.987029 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8tvwc"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.088126 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.088191 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.088229 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhchs\" (UniqueName: \"kubernetes.io/projected/8dfc94ce-eb5a-4891-873a-3b44b9233e55-kube-api-access-mhchs\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.091978 4953 
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.091978 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.094285 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8dfc94ce-eb5a-4891-873a-3b44b9233e55-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.114274 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhchs\" (UniqueName: \"kubernetes.io/projected/8dfc94ce-eb5a-4891-873a-3b44b9233e55-kube-api-access-mhchs\") pod \"marketplace-operator-79b997595-8tvwc\" (UID: \"8dfc94ce-eb5a-4891-873a-3b44b9233e55\") " pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.321870 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.330652 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b6569"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.392426 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content\") pod \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") "
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.392525 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities\") pod \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") "
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.392697 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2x9t\" (UniqueName: \"kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t\") pod \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\" (UID: \"ae65cc66-5a1f-4696-9414-1cd71ea4c36b\") "
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.399028 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t" (OuterVolumeSpecName: "kube-api-access-b2x9t") pod "ae65cc66-5a1f-4696-9414-1cd71ea4c36b" (UID: "ae65cc66-5a1f-4696-9414-1cd71ea4c36b"). InnerVolumeSpecName "kube-api-access-b2x9t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Need to start a new one" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.402026 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities" (OuterVolumeSpecName: "utilities") pod "ae65cc66-5a1f-4696-9414-1cd71ea4c36b" (UID: "ae65cc66-5a1f-4696-9414-1cd71ea4c36b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.406347 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.407010 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.407827 4953 generic.go:334] "Generic (PLEG): container finished" podID="d491ebed-dc7a-41ac-8258-a21538878605" containerID="37a0fcc008df9040039af157a49a5dc337bd5cdb34038b7dfb8e24d552010ebf" exitCode=0 Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.407948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerDied","Data":"37a0fcc008df9040039af157a49a5dc337bd5cdb34038b7dfb8e24d552010ebf"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.426483 4953 generic.go:334] "Generic (PLEG): container finished" podID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerID="7d99427daf2463cc3958d6288931f97a268a9df6e4d5d01084dd6b265b4216bb" exitCode=0 Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.429184 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.429696 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8zbjh" event={"ID":"26d51156-6dc6-4d83-8d51-f2835538a3a9","Type":"ContainerDied","Data":"7d99427daf2463cc3958d6288931f97a268a9df6e4d5d01084dd6b265b4216bb"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.429773 4953 scope.go:117] "RemoveContainer" containerID="7d99427daf2463cc3958d6288931f97a268a9df6e4d5d01084dd6b265b4216bb" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.432695 4953 generic.go:334] "Generic (PLEG): container finished" podID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerID="392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2" exitCode=0 Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.432743 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerDied","Data":"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.432761 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b6569" event={"ID":"ae65cc66-5a1f-4696-9414-1cd71ea4c36b","Type":"ContainerDied","Data":"7cefb6f4ad22135c990b7818417bca575dc51e95d11ac7f49c7c649035d5ddd2"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.432819 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b6569" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.435230 4953 generic.go:334] "Generic (PLEG): container finished" podID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerID="3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6" exitCode=0 Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.435265 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerDied","Data":"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.435284 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-shv84" event={"ID":"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7","Type":"ContainerDied","Data":"199ec72869d0269909b7183fa45099a8c5bda5d27d66b3f2958e8fb81620c945"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.435330 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-shv84" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.441927 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae65cc66-5a1f-4696-9414-1cd71ea4c36b" (UID: "ae65cc66-5a1f-4696-9414-1cd71ea4c36b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.456474 4953 generic.go:334] "Generic (PLEG): container finished" podID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerID="52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3" exitCode=0 Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.456523 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerDied","Data":"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.456552 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bmjtp" event={"ID":"da2262ea-b5ad-41fd-b049-8f281a8a23ea","Type":"ContainerDied","Data":"63e94a84e0dad9bfdd9b916ec033470e16179400795ead99b6c80a32a471423b"} Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.456646 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bmjtp" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.483889 4953 scope.go:117] "RemoveContainer" containerID="392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494156 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content\") pod \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494193 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities\") pod \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494236 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics\") pod \"26d51156-6dc6-4d83-8d51-f2835538a3a9\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494257 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrhhq\" (UniqueName: \"kubernetes.io/projected/26d51156-6dc6-4d83-8d51-f2835538a3a9-kube-api-access-zrhhq\") pod \"26d51156-6dc6-4d83-8d51-f2835538a3a9\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494280 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6cnz\" (UniqueName: \"kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz\") pod \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\" (UID: \"ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494299 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca\") pod \"26d51156-6dc6-4d83-8d51-f2835538a3a9\" (UID: \"26d51156-6dc6-4d83-8d51-f2835538a3a9\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494322 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content\") pod \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494339 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities\") pod \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494356 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l855v\" (UniqueName: \"kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v\") pod \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\" (UID: \"da2262ea-b5ad-41fd-b049-8f281a8a23ea\") " Oct 11 02:50:59 crc kubenswrapper[4953]: 
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494480 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2x9t\" (UniqueName: \"kubernetes.io/projected/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-kube-api-access-b2x9t\") on node \"crc\" DevicePath \"\""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494490 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.494499 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae65cc66-5a1f-4696-9414-1cd71ea4c36b-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.495009 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities" (OuterVolumeSpecName: "utilities") pod "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" (UID: "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.497116 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "26d51156-6dc6-4d83-8d51-f2835538a3a9" (UID: "26d51156-6dc6-4d83-8d51-f2835538a3a9"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.498358 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities" (OuterVolumeSpecName: "utilities") pod "da2262ea-b5ad-41fd-b049-8f281a8a23ea" (UID: "da2262ea-b5ad-41fd-b049-8f281a8a23ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.499364 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v" (OuterVolumeSpecName: "kube-api-access-l855v") pod "da2262ea-b5ad-41fd-b049-8f281a8a23ea" (UID: "da2262ea-b5ad-41fd-b049-8f281a8a23ea"). InnerVolumeSpecName "kube-api-access-l855v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.500772 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz" (OuterVolumeSpecName: "kube-api-access-j6cnz") pod "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" (UID: "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7"). InnerVolumeSpecName "kube-api-access-j6cnz". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.504077 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "26d51156-6dc6-4d83-8d51-f2835538a3a9" (UID: "26d51156-6dc6-4d83-8d51-f2835538a3a9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.507120 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sf6k9" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.508371 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da2262ea-b5ad-41fd-b049-8f281a8a23ea" (UID: "da2262ea-b5ad-41fd-b049-8f281a8a23ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.512233 4953 scope.go:117] "RemoveContainer" containerID="901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.544459 4953 scope.go:117] "RemoveContainer" containerID="025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.569593 4953 scope.go:117] "RemoveContainer" containerID="392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2" Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.571911 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2\": container with ID starting with 392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2 not found: ID does not exist" containerID="392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.571955 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2"} err="failed to get container status \"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2\": rpc error: code = NotFound desc = could not find container \"392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2\": container with ID starting with 392b92ba55c91ee35d1634e97b3664233e3988796bc6be1a09a9a23dbbda67c2 not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.571986 4953 scope.go:117] "RemoveContainer" containerID="901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193" Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.572324 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193\": container with ID starting with 901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193 not found: ID does not exist" containerID="901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.572544 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193"} err="failed to get container status \"901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193\": rpc error: code = NotFound desc = could not find container \"901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193\": container with ID starting with 901e8c602f8f8e77ba37de286488af82a8522d3ec8d209294f9a53aec2633193 not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.572571 4953 scope.go:117] "RemoveContainer" containerID="025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc" Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.572901 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc\": container with ID starting with 025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc not found: ID does not exist" containerID="025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.572929 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc"} err="failed to get container status \"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc\": rpc error: code = NotFound desc = could not find container \"025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc\": container with ID starting with 025bc77eb487a6fa1d0f10bc3a1e5cb0bafb694ff2a85fce650eaa06be4b3fbc not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.572946 4953 scope.go:117] "RemoveContainer" containerID="3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596082 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596146 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da2262ea-b5ad-41fd-b049-8f281a8a23ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596160 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l855v\" (UniqueName: \"kubernetes.io/projected/da2262ea-b5ad-41fd-b049-8f281a8a23ea-kube-api-access-l855v\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596172 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596186 4953 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596198 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrhhq\" (UniqueName: \"kubernetes.io/projected/26d51156-6dc6-4d83-8d51-f2835538a3a9-kube-api-access-zrhhq\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc 
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596209 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6cnz\" (UniqueName: \"kubernetes.io/projected/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-kube-api-access-j6cnz\") on node \"crc\" DevicePath \"\""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596219 4953 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/26d51156-6dc6-4d83-8d51-f2835538a3a9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.596648 4953 scope.go:117] "RemoveContainer" containerID="7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.618183 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" (UID: "ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.635153 4953 scope.go:117] "RemoveContainer" containerID="33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.646887 4953 scope.go:117] "RemoveContainer" containerID="3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6"
Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.647165 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6\": container with ID starting with 3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6 not found: ID does not exist" containerID="3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.647190 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6"} err="failed to get container status \"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6\": rpc error: code = NotFound desc = could not find container \"3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6\": container with ID starting with 3bc576b270f2e01394dff36a1a53f061893749fb3bc88a395257cef08d4972f6 not found: ID does not exist"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.647215 4953 scope.go:117] "RemoveContainer" containerID="7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5"
Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.647463 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5\": container with ID starting with 7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5 not found: ID does not exist" containerID="7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5"
\"7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5\": rpc error: code = NotFound desc = could not find container \"7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5\": container with ID starting with 7468e81dcab6180eef293faa2f3bb1694510b2116d65d76c7f8e9294818623a5 not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.647531 4953 scope.go:117] "RemoveContainer" containerID="33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e" Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.647783 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e\": container with ID starting with 33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e not found: ID does not exist" containerID="33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.647804 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e"} err="failed to get container status \"33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e\": rpc error: code = NotFound desc = could not find container \"33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e\": container with ID starting with 33f3ec9426a6db77b4af9099cc96842753ee5fd4253c37d3aeaea4efa7cd2c5e not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.647817 4953 scope.go:117] "RemoveContainer" containerID="52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.659133 4953 scope.go:117] "RemoveContainer" containerID="0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.673113 4953 scope.go:117] "RemoveContainer" containerID="f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689066 4953 scope.go:117] "RemoveContainer" containerID="52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3" Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.689416 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3\": container with ID starting with 52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3 not found: ID does not exist" containerID="52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689447 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3"} err="failed to get container status \"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3\": rpc error: code = NotFound desc = could not find container \"52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3\": container with ID starting with 52b8579fb700c8f6eca3d9518fa09d741834c19fb79b53a7307eeee2671611f3 not found: ID does not exist" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689471 4953 scope.go:117] "RemoveContainer" containerID="0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426" Oct 11 02:50:59 crc 
Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.689724 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426\": container with ID starting with 0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426 not found: ID does not exist" containerID="0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689744 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426"} err="failed to get container status \"0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426\": rpc error: code = NotFound desc = could not find container \"0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426\": container with ID starting with 0944bc7f67a4acd9b57b5b77f33020deed33ad1b595e906a67b01d7027cbb426 not found: ID does not exist"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689759 4953 scope.go:117] "RemoveContainer" containerID="f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae"
Oct 11 02:50:59 crc kubenswrapper[4953]: E1011 02:50:59.689960 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae\": container with ID starting with f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae not found: ID does not exist" containerID="f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.689979 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae"} err="failed to get container status \"f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae\": rpc error: code = NotFound desc = could not find container \"f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae\": container with ID starting with f5844ab29854a3ef9c66abdea2de0e78618aa4855ce57f689d1f2353b26f5bae not found: ID does not exist"
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.697793 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities\") pod \"d491ebed-dc7a-41ac-8258-a21538878605\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") "
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.697822 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content\") pod \"d491ebed-dc7a-41ac-8258-a21538878605\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") "
Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.697862 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb7xk\" (UniqueName: \"kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk\") pod \"d491ebed-dc7a-41ac-8258-a21538878605\" (UID: \"d491ebed-dc7a-41ac-8258-a21538878605\") "
\"kubernetes.io/empty-dir/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.699089 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities" (OuterVolumeSpecName: "utilities") pod "d491ebed-dc7a-41ac-8258-a21538878605" (UID: "d491ebed-dc7a-41ac-8258-a21538878605"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.701562 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk" (OuterVolumeSpecName: "kube-api-access-mb7xk") pod "d491ebed-dc7a-41ac-8258-a21538878605" (UID: "d491ebed-dc7a-41ac-8258-a21538878605"). InnerVolumeSpecName "kube-api-access-mb7xk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.766999 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.773170 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8zbjh"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.777977 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.779207 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d491ebed-dc7a-41ac-8258-a21538878605" (UID: "d491ebed-dc7a-41ac-8258-a21538878605"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.785285 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-shv84"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.788550 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.793350 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b6569"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.799008 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.799034 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d491ebed-dc7a-41ac-8258-a21538878605-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.799048 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb7xk\" (UniqueName: \"kubernetes.io/projected/d491ebed-dc7a-41ac-8258-a21538878605-kube-api-access-mb7xk\") on node \"crc\" DevicePath \"\"" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.813331 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" path="/var/lib/kubelet/pods/26d51156-6dc6-4d83-8d51-f2835538a3a9/volumes" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.814108 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" path="/var/lib/kubelet/pods/ae65cc66-5a1f-4696-9414-1cd71ea4c36b/volumes" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.815135 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" path="/var/lib/kubelet/pods/ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7/volumes" Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.818903 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.819011 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bmjtp"] Oct 11 02:50:59 crc kubenswrapper[4953]: I1011 02:50:59.834351 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8tvwc"] Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.464260 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" event={"ID":"8dfc94ce-eb5a-4891-873a-3b44b9233e55","Type":"ContainerStarted","Data":"e2608803525d2d4777ec7e434cb09c54c04b674b547885d107c8f275a452aa60"} Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.464314 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" event={"ID":"8dfc94ce-eb5a-4891-873a-3b44b9233e55","Type":"ContainerStarted","Data":"021d7e432e1a1e77f84a1917af8417f64e40e15626caa5d623cbbbb95d16a401"} Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.464702 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" Oct 11 02:51:00 crc 
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.466563 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sf6k9" event={"ID":"d491ebed-dc7a-41ac-8258-a21538878605","Type":"ContainerDied","Data":"a6f4bce7b6435cf7f47472cf30bc818b656567c630904f5e977ec494146b929b"}
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.466940 4953 scope.go:117] "RemoveContainer" containerID="37a0fcc008df9040039af157a49a5dc337bd5cdb34038b7dfb8e24d552010ebf"
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.466717 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sf6k9"
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.472101 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc"
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.480911 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8tvwc" podStartSLOduration=2.480882752 podStartE2EDuration="2.480882752s" podCreationTimestamp="2025-10-11 02:50:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:51:00.478565181 +0000 UTC m=+271.411652835" watchObservedRunningTime="2025-10-11 02:51:00.480882752 +0000 UTC m=+271.413970436"
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.492097 4953 scope.go:117] "RemoveContainer" containerID="8ad5ea18a7c37cb43dfada2857e4b93650c1feb48cf421db45956be88987420a"
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.517242 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sf6k9"]
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.517636 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sf6k9"]
Oct 11 02:51:00 crc kubenswrapper[4953]: I1011 02:51:00.552715 4953 scope.go:117] "RemoveContainer" containerID="aa305cec237c47101d580519897e9bdf30460996b90666f5af2979d79929ba2c"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114511 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l2cxf"]
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114736 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114749 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114760 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114766 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114778 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114783 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114795 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114801 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114810 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114815 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114823 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114828 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114836 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114841 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114849 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114854 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="extract-content"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114862 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114868 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114876 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114881 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="registry-server"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114889 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114895 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="extract-utilities"
Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114904 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="registry-server"
containerName="registry-server" Oct 11 02:51:01 crc kubenswrapper[4953]: E1011 02:51:01.114919 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="extract-content" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114924 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="extract-content" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.114997 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d491ebed-dc7a-41ac-8258-a21538878605" containerName="registry-server" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.115009 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="26d51156-6dc6-4d83-8d51-f2835538a3a9" containerName="marketplace-operator" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.115017 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae65cc66-5a1f-4696-9414-1cd71ea4c36b" containerName="registry-server" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.115023 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" containerName="registry-server" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.115033 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee17938b-d6d0-4ec9-a0e5-b58247a8d0d7" containerName="registry-server" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.115704 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.117590 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.118836 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-utilities\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.119004 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mkx6\" (UniqueName: \"kubernetes.io/projected/6f42776d-bcaa-480d-8a98-f48bec6b587a-kube-api-access-6mkx6\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.119297 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-catalog-content\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.121390 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l2cxf"] Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.219864 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-catalog-content\") pod \"redhat-marketplace-l2cxf\" (UID: 
\"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.219977 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-utilities\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.220034 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mkx6\" (UniqueName: \"kubernetes.io/projected/6f42776d-bcaa-480d-8a98-f48bec6b587a-kube-api-access-6mkx6\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.220517 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-catalog-content\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.220845 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f42776d-bcaa-480d-8a98-f48bec6b587a-utilities\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.252594 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mkx6\" (UniqueName: \"kubernetes.io/projected/6f42776d-bcaa-480d-8a98-f48bec6b587a-kube-api-access-6mkx6\") pod \"redhat-marketplace-l2cxf\" (UID: \"6f42776d-bcaa-480d-8a98-f48bec6b587a\") " pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.309454 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qn9wq"] Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.310911 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.312634 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.318648 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qn9wq"] Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.421672 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z4wb\" (UniqueName: \"kubernetes.io/projected/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-kube-api-access-5z4wb\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.422143 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-catalog-content\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.422331 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-utilities\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.479787 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.524162 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-catalog-content\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.524250 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-utilities\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.524292 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z4wb\" (UniqueName: \"kubernetes.io/projected/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-kube-api-access-5z4wb\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.525160 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-catalog-content\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.525250 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-utilities\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.547837 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z4wb\" (UniqueName: \"kubernetes.io/projected/f024ccc7-4e04-47b6-83ee-05da1aa42bfb-kube-api-access-5z4wb\") pod \"community-operators-qn9wq\" (UID: \"f024ccc7-4e04-47b6-83ee-05da1aa42bfb\") " pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.630720 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.655496 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l2cxf"] Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.802031 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d491ebed-dc7a-41ac-8258-a21538878605" path="/var/lib/kubelet/pods/d491ebed-dc7a-41ac-8258-a21538878605/volumes" Oct 11 02:51:01 crc kubenswrapper[4953]: I1011 02:51:01.803055 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da2262ea-b5ad-41fd-b049-8f281a8a23ea" path="/var/lib/kubelet/pods/da2262ea-b5ad-41fd-b049-8f281a8a23ea/volumes" Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.023894 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qn9wq"] Oct 11 02:51:02 crc kubenswrapper[4953]: W1011 02:51:02.028903 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf024ccc7_4e04_47b6_83ee_05da1aa42bfb.slice/crio-ad278548035def8e7b18a6e6faf15f15f7b392b5f845f982ffd2c12516736a77 WatchSource:0}: Error finding container ad278548035def8e7b18a6e6faf15f15f7b392b5f845f982ffd2c12516736a77: Status 404 returned error can't find the container with id ad278548035def8e7b18a6e6faf15f15f7b392b5f845f982ffd2c12516736a77 Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.490417 4953 generic.go:334] "Generic (PLEG): container finished" podID="6f42776d-bcaa-480d-8a98-f48bec6b587a" containerID="67583f4e892409bb7bdac71ec7a26ed1f2798ee4c945842cdf4bff39d2802d65" exitCode=0 Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.490472 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l2cxf" event={"ID":"6f42776d-bcaa-480d-8a98-f48bec6b587a","Type":"ContainerDied","Data":"67583f4e892409bb7bdac71ec7a26ed1f2798ee4c945842cdf4bff39d2802d65"} Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.490913 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l2cxf" event={"ID":"6f42776d-bcaa-480d-8a98-f48bec6b587a","Type":"ContainerStarted","Data":"1aa4badb3b70cb93f19092d46b1584d69ac561e6c2e8237a0e6273116092a393"} Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.496874 4953 generic.go:334] "Generic (PLEG): container finished" podID="f024ccc7-4e04-47b6-83ee-05da1aa42bfb" containerID="1e11bd9eea901c8a1f3883d058c6e720ba9c49ff26291eac8e0177c7345b5616" exitCode=0 Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.496952 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qn9wq" event={"ID":"f024ccc7-4e04-47b6-83ee-05da1aa42bfb","Type":"ContainerDied","Data":"1e11bd9eea901c8a1f3883d058c6e720ba9c49ff26291eac8e0177c7345b5616"} Oct 11 02:51:02 crc kubenswrapper[4953]: I1011 02:51:02.496973 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qn9wq" event={"ID":"f024ccc7-4e04-47b6-83ee-05da1aa42bfb","Type":"ContainerStarted","Data":"ad278548035def8e7b18a6e6faf15f15f7b392b5f845f982ffd2c12516736a77"} Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.509718 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8gl84"] Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.509808 4953 generic.go:334] "Generic (PLEG): container finished" 
podID="f024ccc7-4e04-47b6-83ee-05da1aa42bfb" containerID="a1e86d05aa9b7164e48d99449cd6294579386bb69850697fb0a00dfefcd2af3e" exitCode=0 Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.513029 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qn9wq" event={"ID":"f024ccc7-4e04-47b6-83ee-05da1aa42bfb","Type":"ContainerDied","Data":"a1e86d05aa9b7164e48d99449cd6294579386bb69850697fb0a00dfefcd2af3e"} Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.513132 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.514981 4953 generic.go:334] "Generic (PLEG): container finished" podID="6f42776d-bcaa-480d-8a98-f48bec6b587a" containerID="547c8b2956fd1d61f32b06551f5c97563f99ff999160d30131191b8bca632545" exitCode=0 Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.515027 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l2cxf" event={"ID":"6f42776d-bcaa-480d-8a98-f48bec6b587a","Type":"ContainerDied","Data":"547c8b2956fd1d61f32b06551f5c97563f99ff999160d30131191b8bca632545"} Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.520466 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gl84"] Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.551426 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.554985 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g4q7\" (UniqueName: \"kubernetes.io/projected/db5bd718-f43c-4c36-ba50-a6ac1853433f-kube-api-access-6g4q7\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.555035 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-utilities\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.555101 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-catalog-content\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.657574 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-catalog-content\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.657743 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g4q7\" (UniqueName: \"kubernetes.io/projected/db5bd718-f43c-4c36-ba50-a6ac1853433f-kube-api-access-6g4q7\") pod \"certified-operators-8gl84\" (UID: 
\"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.657787 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-utilities\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.658256 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-catalog-content\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.658335 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db5bd718-f43c-4c36-ba50-a6ac1853433f-utilities\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.678911 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g4q7\" (UniqueName: \"kubernetes.io/projected/db5bd718-f43c-4c36-ba50-a6ac1853433f-kube-api-access-6g4q7\") pod \"certified-operators-8gl84\" (UID: \"db5bd718-f43c-4c36-ba50-a6ac1853433f\") " pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.713737 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2kt72"] Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.715799 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.720129 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.720523 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2kt72"] Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.759418 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-utilities\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.759487 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d28v2\" (UniqueName: \"kubernetes.io/projected/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-kube-api-access-d28v2\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.759542 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-catalog-content\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.860936 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-utilities\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.861000 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d28v2\" (UniqueName: \"kubernetes.io/projected/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-kube-api-access-d28v2\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.861032 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-catalog-content\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.861590 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-catalog-content\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.862764 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-utilities\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " 
pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.883578 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:03 crc kubenswrapper[4953]: I1011 02:51:03.892745 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d28v2\" (UniqueName: \"kubernetes.io/projected/e9c8e1d7-cd5b-4b03-a755-61bf082e781e-kube-api-access-d28v2\") pod \"redhat-operators-2kt72\" (UID: \"e9c8e1d7-cd5b-4b03-a755-61bf082e781e\") " pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.042109 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.313255 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8gl84"] Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.451554 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2kt72"] Oct 11 02:51:04 crc kubenswrapper[4953]: W1011 02:51:04.457644 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9c8e1d7_cd5b_4b03_a755_61bf082e781e.slice/crio-2d46a6b8bc9d82aedf492bdfb8e884f81aeae0386250e77d83406d2b3e8fb68d WatchSource:0}: Error finding container 2d46a6b8bc9d82aedf492bdfb8e884f81aeae0386250e77d83406d2b3e8fb68d: Status 404 returned error can't find the container with id 2d46a6b8bc9d82aedf492bdfb8e884f81aeae0386250e77d83406d2b3e8fb68d Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.532442 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qn9wq" event={"ID":"f024ccc7-4e04-47b6-83ee-05da1aa42bfb","Type":"ContainerStarted","Data":"4b62bb52e08578762bc2699495a3b7b8d829e1a39ac3f6bad0f42cab54cff73d"} Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.534259 4953 generic.go:334] "Generic (PLEG): container finished" podID="db5bd718-f43c-4c36-ba50-a6ac1853433f" containerID="361add888c474e28b62baeca409c8be3911a9a020dffdb5be9eb198a6dc12465" exitCode=0 Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.534317 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gl84" event={"ID":"db5bd718-f43c-4c36-ba50-a6ac1853433f","Type":"ContainerDied","Data":"361add888c474e28b62baeca409c8be3911a9a020dffdb5be9eb198a6dc12465"} Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.534344 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gl84" event={"ID":"db5bd718-f43c-4c36-ba50-a6ac1853433f","Type":"ContainerStarted","Data":"bbf339675dcaa3aa708b2e78b3f7db0b3fc3ccc34018d1944f023508fa8aa3ea"} Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.538949 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l2cxf" event={"ID":"6f42776d-bcaa-480d-8a98-f48bec6b587a","Type":"ContainerStarted","Data":"8cf5461b476f6f3c201d12ab1b373ebe3703c9c57d807ed3f0b3f5ca1b8a1890"} Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.543308 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2kt72" 
event={"ID":"e9c8e1d7-cd5b-4b03-a755-61bf082e781e","Type":"ContainerStarted","Data":"2d46a6b8bc9d82aedf492bdfb8e884f81aeae0386250e77d83406d2b3e8fb68d"} Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.569865 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qn9wq" podStartSLOduration=2.090716643 podStartE2EDuration="3.569849147s" podCreationTimestamp="2025-10-11 02:51:01 +0000 UTC" firstStartedPulling="2025-10-11 02:51:02.49811452 +0000 UTC m=+273.431202164" lastFinishedPulling="2025-10-11 02:51:03.977247034 +0000 UTC m=+274.910334668" observedRunningTime="2025-10-11 02:51:04.55102444 +0000 UTC m=+275.484112094" watchObservedRunningTime="2025-10-11 02:51:04.569849147 +0000 UTC m=+275.502936791" Oct 11 02:51:04 crc kubenswrapper[4953]: I1011 02:51:04.570125 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l2cxf" podStartSLOduration=2.145131149 podStartE2EDuration="3.570122054s" podCreationTimestamp="2025-10-11 02:51:01 +0000 UTC" firstStartedPulling="2025-10-11 02:51:02.496320542 +0000 UTC m=+273.429408186" lastFinishedPulling="2025-10-11 02:51:03.921311427 +0000 UTC m=+274.854399091" observedRunningTime="2025-10-11 02:51:04.56920003 +0000 UTC m=+275.502287674" watchObservedRunningTime="2025-10-11 02:51:04.570122054 +0000 UTC m=+275.503209698" Oct 11 02:51:05 crc kubenswrapper[4953]: I1011 02:51:05.547684 4953 generic.go:334] "Generic (PLEG): container finished" podID="e9c8e1d7-cd5b-4b03-a755-61bf082e781e" containerID="0ee9cd2550874015e9743fa5931b623c77f2fe5fce4c9519965915748a7459c1" exitCode=0 Oct 11 02:51:05 crc kubenswrapper[4953]: I1011 02:51:05.547764 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2kt72" event={"ID":"e9c8e1d7-cd5b-4b03-a755-61bf082e781e","Type":"ContainerDied","Data":"0ee9cd2550874015e9743fa5931b623c77f2fe5fce4c9519965915748a7459c1"} Oct 11 02:51:05 crc kubenswrapper[4953]: I1011 02:51:05.552961 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gl84" event={"ID":"db5bd718-f43c-4c36-ba50-a6ac1853433f","Type":"ContainerStarted","Data":"d5a6459b8cb77ce432af1b223986e5697b38e61e6977a192309ba6ccfdc3bec5"} Oct 11 02:51:06 crc kubenswrapper[4953]: I1011 02:51:06.562106 4953 generic.go:334] "Generic (PLEG): container finished" podID="db5bd718-f43c-4c36-ba50-a6ac1853433f" containerID="d5a6459b8cb77ce432af1b223986e5697b38e61e6977a192309ba6ccfdc3bec5" exitCode=0 Oct 11 02:51:06 crc kubenswrapper[4953]: I1011 02:51:06.562278 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gl84" event={"ID":"db5bd718-f43c-4c36-ba50-a6ac1853433f","Type":"ContainerDied","Data":"d5a6459b8cb77ce432af1b223986e5697b38e61e6977a192309ba6ccfdc3bec5"} Oct 11 02:51:08 crc kubenswrapper[4953]: I1011 02:51:08.574082 4953 generic.go:334] "Generic (PLEG): container finished" podID="e9c8e1d7-cd5b-4b03-a755-61bf082e781e" containerID="455078ecea6d3dbdf9b2a8e91413454ebb04a620543d2307ebd706855b05baf7" exitCode=0 Oct 11 02:51:08 crc kubenswrapper[4953]: I1011 02:51:08.574169 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2kt72" event={"ID":"e9c8e1d7-cd5b-4b03-a755-61bf082e781e","Type":"ContainerDied","Data":"455078ecea6d3dbdf9b2a8e91413454ebb04a620543d2307ebd706855b05baf7"} Oct 11 02:51:08 crc kubenswrapper[4953]: I1011 02:51:08.579329 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-8gl84" event={"ID":"db5bd718-f43c-4c36-ba50-a6ac1853433f","Type":"ContainerStarted","Data":"027b566acecadc76cce344f5a580fe0a74494318531c6f8e2faeeb52ac55958e"} Oct 11 02:51:08 crc kubenswrapper[4953]: I1011 02:51:08.625438 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8gl84" podStartSLOduration=3.156767657 podStartE2EDuration="5.62541214s" podCreationTimestamp="2025-10-11 02:51:03 +0000 UTC" firstStartedPulling="2025-10-11 02:51:04.536588819 +0000 UTC m=+275.469676463" lastFinishedPulling="2025-10-11 02:51:07.005233302 +0000 UTC m=+277.938320946" observedRunningTime="2025-10-11 02:51:08.62087726 +0000 UTC m=+279.553964904" watchObservedRunningTime="2025-10-11 02:51:08.62541214 +0000 UTC m=+279.558499824" Oct 11 02:51:09 crc kubenswrapper[4953]: I1011 02:51:09.586658 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2kt72" event={"ID":"e9c8e1d7-cd5b-4b03-a755-61bf082e781e","Type":"ContainerStarted","Data":"36e3ef6ae7759b52852f451c8496ae73627817555d33970ba4d7e4922840d8d4"} Oct 11 02:51:09 crc kubenswrapper[4953]: I1011 02:51:09.610274 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2kt72" podStartSLOduration=3.052096352 podStartE2EDuration="6.610257165s" podCreationTimestamp="2025-10-11 02:51:03 +0000 UTC" firstStartedPulling="2025-10-11 02:51:05.548845639 +0000 UTC m=+276.481933283" lastFinishedPulling="2025-10-11 02:51:09.107006452 +0000 UTC m=+280.040094096" observedRunningTime="2025-10-11 02:51:09.606459305 +0000 UTC m=+280.539546959" watchObservedRunningTime="2025-10-11 02:51:09.610257165 +0000 UTC m=+280.543344809" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.480713 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.480886 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.536898 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.631581 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.631659 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.658057 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l2cxf" Oct 11 02:51:11 crc kubenswrapper[4953]: I1011 02:51:11.699463 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:12 crc kubenswrapper[4953]: I1011 02:51:12.638494 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qn9wq" Oct 11 02:51:13 crc kubenswrapper[4953]: I1011 02:51:13.884266 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:13 crc kubenswrapper[4953]: 
I1011 02:51:13.884533 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:13 crc kubenswrapper[4953]: I1011 02:51:13.928653 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:14 crc kubenswrapper[4953]: I1011 02:51:14.043371 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:14 crc kubenswrapper[4953]: I1011 02:51:14.043426 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:14 crc kubenswrapper[4953]: I1011 02:51:14.101503 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:51:14 crc kubenswrapper[4953]: I1011 02:51:14.678776 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8gl84" Oct 11 02:51:14 crc kubenswrapper[4953]: I1011 02:51:14.681730 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2kt72" Oct 11 02:52:41 crc kubenswrapper[4953]: I1011 02:52:41.316562 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:52:41 crc kubenswrapper[4953]: I1011 02:52:41.317070 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:53:11 crc kubenswrapper[4953]: I1011 02:53:11.316329 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:53:11 crc kubenswrapper[4953]: I1011 02:53:11.317238 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.013248 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xr9pr"] Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.015970 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.037213 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xr9pr"] Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065364 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065429 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-tls\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065456 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-bound-sa-token\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065490 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-certificates\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065529 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065567 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q88r\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-kube-api-access-2q88r\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065593 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.065646 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-trusted-ca\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.100745 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167183 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-tls\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167239 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-bound-sa-token\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167276 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-certificates\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167330 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q88r\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-kube-api-access-2q88r\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167358 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167381 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-trusted-ca\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.167436 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.169215 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.169788 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-trusted-ca\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.170930 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-certificates\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.175085 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.175827 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-registry-tls\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.183781 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-bound-sa-token\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.195929 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q88r\" (UniqueName: \"kubernetes.io/projected/c70ab219-f8a0-47b2-92d9-c8c46f0e8f54-kube-api-access-2q88r\") pod \"image-registry-66df7c8f76-xr9pr\" (UID: \"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54\") " pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.345120 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:16 crc kubenswrapper[4953]: I1011 02:53:16.557813 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xr9pr"] Oct 11 02:53:16 crc kubenswrapper[4953]: W1011 02:53:16.563898 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc70ab219_f8a0_47b2_92d9_c8c46f0e8f54.slice/crio-db02716eb657a80fa663a0cf38a6c1e0b9de21ec8f37215112f6ec0dc5bae70e WatchSource:0}: Error finding container db02716eb657a80fa663a0cf38a6c1e0b9de21ec8f37215112f6ec0dc5bae70e: Status 404 returned error can't find the container with id db02716eb657a80fa663a0cf38a6c1e0b9de21ec8f37215112f6ec0dc5bae70e Oct 11 02:53:17 crc kubenswrapper[4953]: I1011 02:53:17.370955 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" event={"ID":"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54","Type":"ContainerStarted","Data":"a1779a8e41975ffa9574457fc763dae5d22ce55e8681215a889b1d325423da0d"} Oct 11 02:53:17 crc kubenswrapper[4953]: I1011 02:53:17.371306 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" event={"ID":"c70ab219-f8a0-47b2-92d9-c8c46f0e8f54","Type":"ContainerStarted","Data":"db02716eb657a80fa663a0cf38a6c1e0b9de21ec8f37215112f6ec0dc5bae70e"} Oct 11 02:53:17 crc kubenswrapper[4953]: I1011 02:53:17.371321 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:17 crc kubenswrapper[4953]: I1011 02:53:17.388695 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" podStartSLOduration=2.388673902 podStartE2EDuration="2.388673902s" podCreationTimestamp="2025-10-11 02:53:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:53:17.386360972 +0000 UTC m=+408.319448616" watchObservedRunningTime="2025-10-11 02:53:17.388673902 +0000 UTC m=+408.321761556" Oct 11 02:53:36 crc kubenswrapper[4953]: I1011 02:53:36.353213 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-xr9pr" Oct 11 02:53:36 crc kubenswrapper[4953]: I1011 02:53:36.430154 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.316237 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.316342 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.316423 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.317522 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.317659 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665" gracePeriod=600 Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.519097 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665" exitCode=0 Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.519537 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665"} Oct 11 02:53:41 crc kubenswrapper[4953]: I1011 02:53:41.519595 4953 scope.go:117] "RemoveContainer" containerID="0d52749224377e7e6c21e3dba9cba507257fb030dce077053e88e2bec53364d8" Oct 11 02:53:42 crc kubenswrapper[4953]: I1011 02:53:42.529584 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a"} Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.493810 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" podUID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" containerName="registry" containerID="cri-o://5ad8a68fc99249acf7196b2d01fba3f01c517a1f4454e9245f48a35fb8de6ea4" gracePeriod=30 Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.675843 4953 generic.go:334] "Generic (PLEG): container finished" podID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" containerID="5ad8a68fc99249acf7196b2d01fba3f01c517a1f4454e9245f48a35fb8de6ea4" exitCode=0 Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.676120 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" event={"ID":"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd","Type":"ContainerDied","Data":"5ad8a68fc99249acf7196b2d01fba3f01c517a1f4454e9245f48a35fb8de6ea4"} Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.855799 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991381 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991420 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991447 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcwbx\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991482 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991502 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991520 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991641 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.991671 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca\") pod \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\" (UID: \"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd\") " Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.993282 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:54:01 crc kubenswrapper[4953]: I1011 02:54:01.993498 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.001353 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx" (OuterVolumeSpecName: "kube-api-access-gcwbx") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "kube-api-access-gcwbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.003921 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.005173 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.005780 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.008090 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.011769 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" (UID: "56d33bde-ddb0-4a97-8c45-5df1f80cbdbd"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092822 4953 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092859 4953 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092871 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcwbx\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-kube-api-access-gcwbx\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092883 4953 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092896 4953 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092907 4953 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.092918 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.684305 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" event={"ID":"56d33bde-ddb0-4a97-8c45-5df1f80cbdbd","Type":"ContainerDied","Data":"d1c34714912d78f30be509a7c31b10da5a4626da36dfcc7b5625ce993d85e956"} Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.684627 4953 scope.go:117] "RemoveContainer" containerID="5ad8a68fc99249acf7196b2d01fba3f01c517a1f4454e9245f48a35fb8de6ea4" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.684369 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-svdgp" Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.724572 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:54:02 crc kubenswrapper[4953]: I1011 02:54:02.735192 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-svdgp"] Oct 11 02:54:03 crc kubenswrapper[4953]: I1011 02:54:03.820083 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" path="/var/lib/kubelet/pods/56d33bde-ddb0-4a97-8c45-5df1f80cbdbd/volumes" Oct 11 02:55:41 crc kubenswrapper[4953]: I1011 02:55:41.316950 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:55:41 crc kubenswrapper[4953]: I1011 02:55:41.317736 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.122506 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mmh22"] Oct 11 02:56:05 crc kubenswrapper[4953]: E1011 02:56:05.123452 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" containerName="registry" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.123468 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" containerName="registry" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.123579 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d33bde-ddb0-4a97-8c45-5df1f80cbdbd" containerName="registry" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.123997 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.126928 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.128144 4953 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-62fm7" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.130363 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.147788 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hvzfk"] Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.148855 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hvzfk" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.150310 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mmh22"] Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.151775 4953 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-ntfht" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.153089 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-zgb5r"] Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.154162 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.157475 4953 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-m75v7" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.160552 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-zgb5r"] Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.162962 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hvzfk"] Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.241067 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s9z4\" (UniqueName: \"kubernetes.io/projected/94a49212-5e10-4033-99db-7d55789632e3-kube-api-access-4s9z4\") pod \"cert-manager-webhook-5655c58dd6-zgb5r\" (UID: \"94a49212-5e10-4033-99db-7d55789632e3\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.241341 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lcrm\" (UniqueName: \"kubernetes.io/projected/0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10-kube-api-access-2lcrm\") pod \"cert-manager-cainjector-7f985d654d-mmh22\" (UID: \"0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.241431 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdffh\" (UniqueName: \"kubernetes.io/projected/4322ff87-c450-47d6-81e1-a5fb78efd7cb-kube-api-access-bdffh\") pod \"cert-manager-5b446d88c5-hvzfk\" (UID: \"4322ff87-c450-47d6-81e1-a5fb78efd7cb\") " pod="cert-manager/cert-manager-5b446d88c5-hvzfk" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.343362 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lcrm\" (UniqueName: \"kubernetes.io/projected/0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10-kube-api-access-2lcrm\") pod \"cert-manager-cainjector-7f985d654d-mmh22\" (UID: \"0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.343439 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdffh\" (UniqueName: \"kubernetes.io/projected/4322ff87-c450-47d6-81e1-a5fb78efd7cb-kube-api-access-bdffh\") pod \"cert-manager-5b446d88c5-hvzfk\" (UID: \"4322ff87-c450-47d6-81e1-a5fb78efd7cb\") " pod="cert-manager/cert-manager-5b446d88c5-hvzfk" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.343478 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s9z4\" (UniqueName: \"kubernetes.io/projected/94a49212-5e10-4033-99db-7d55789632e3-kube-api-access-4s9z4\") pod \"cert-manager-webhook-5655c58dd6-zgb5r\" (UID: \"94a49212-5e10-4033-99db-7d55789632e3\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.366051 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdffh\" (UniqueName: \"kubernetes.io/projected/4322ff87-c450-47d6-81e1-a5fb78efd7cb-kube-api-access-bdffh\") pod \"cert-manager-5b446d88c5-hvzfk\" (UID: \"4322ff87-c450-47d6-81e1-a5fb78efd7cb\") " pod="cert-manager/cert-manager-5b446d88c5-hvzfk" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.366088 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lcrm\" (UniqueName: \"kubernetes.io/projected/0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10-kube-api-access-2lcrm\") pod \"cert-manager-cainjector-7f985d654d-mmh22\" (UID: \"0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.371781 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s9z4\" (UniqueName: \"kubernetes.io/projected/94a49212-5e10-4033-99db-7d55789632e3-kube-api-access-4s9z4\") pod \"cert-manager-webhook-5655c58dd6-zgb5r\" (UID: \"94a49212-5e10-4033-99db-7d55789632e3\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.448750 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.463695 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hvzfk" Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.474821 4953 util.go:30] "No sandbox for pod can be found. 
Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.840666 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-zgb5r"]
Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.854743 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.960751 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hvzfk"]
Oct 11 02:56:05 crc kubenswrapper[4953]: I1011 02:56:05.963693 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-mmh22"]
Oct 11 02:56:05 crc kubenswrapper[4953]: W1011 02:56:05.967773 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e0ba7b5_bed4_40fc_bc3e_82fab6e73d10.slice/crio-a6ba135e8a62fac6425aabdf0723ed4e5b0824aaa6c78a8116e3b414cff016a5 WatchSource:0}: Error finding container a6ba135e8a62fac6425aabdf0723ed4e5b0824aaa6c78a8116e3b414cff016a5: Status 404 returned error can't find the container with id a6ba135e8a62fac6425aabdf0723ed4e5b0824aaa6c78a8116e3b414cff016a5
Oct 11 02:56:05 crc kubenswrapper[4953]: W1011 02:56:05.969112 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4322ff87_c450_47d6_81e1_a5fb78efd7cb.slice/crio-d0bfd95c314d3e6630fbe76ec0e520b6a4b8d84342569d3a97f03c558d8906c4 WatchSource:0}: Error finding container d0bfd95c314d3e6630fbe76ec0e520b6a4b8d84342569d3a97f03c558d8906c4: Status 404 returned error can't find the container with id d0bfd95c314d3e6630fbe76ec0e520b6a4b8d84342569d3a97f03c558d8906c4
Oct 11 02:56:06 crc kubenswrapper[4953]: I1011 02:56:06.512237 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" event={"ID":"0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10","Type":"ContainerStarted","Data":"a6ba135e8a62fac6425aabdf0723ed4e5b0824aaa6c78a8116e3b414cff016a5"}
Oct 11 02:56:06 crc kubenswrapper[4953]: I1011 02:56:06.516558 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hvzfk" event={"ID":"4322ff87-c450-47d6-81e1-a5fb78efd7cb","Type":"ContainerStarted","Data":"d0bfd95c314d3e6630fbe76ec0e520b6a4b8d84342569d3a97f03c558d8906c4"}
Oct 11 02:56:06 crc kubenswrapper[4953]: I1011 02:56:06.520515 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" event={"ID":"94a49212-5e10-4033-99db-7d55789632e3","Type":"ContainerStarted","Data":"8ea2bd1f140b21b6f6c66d8ec2a87112c72917381b659bc863f104d00013032d"}
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.539162 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hvzfk" event={"ID":"4322ff87-c450-47d6-81e1-a5fb78efd7cb","Type":"ContainerStarted","Data":"9e0ad96160e696446a8a7ce669735c593543a01b008573c119e33a8933255341"}
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.541129 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" event={"ID":"94a49212-5e10-4033-99db-7d55789632e3","Type":"ContainerStarted","Data":"f4df31010d819be57a6bc674253e539f42def87bf690b9fcec1220c61e1c5dca"}
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.541536 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r"
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.543163 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" event={"ID":"0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10","Type":"ContainerStarted","Data":"6871389579e9776f5c7a01321b6fca9c0b2e3b54241b73a8df74ccffe339d51b"}
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.556008 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-hvzfk" podStartSLOduration=1.231799126 podStartE2EDuration="4.555995773s" podCreationTimestamp="2025-10-11 02:56:05 +0000 UTC" firstStartedPulling="2025-10-11 02:56:05.971819948 +0000 UTC m=+576.904907592" lastFinishedPulling="2025-10-11 02:56:09.296016565 +0000 UTC m=+580.229104239" observedRunningTime="2025-10-11 02:56:09.55471325 +0000 UTC m=+580.487800904" watchObservedRunningTime="2025-10-11 02:56:09.555995773 +0000 UTC m=+580.489083427"
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.623927 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-mmh22" podStartSLOduration=1.3736022700000001 podStartE2EDuration="4.623908438s" podCreationTimestamp="2025-10-11 02:56:05 +0000 UTC" firstStartedPulling="2025-10-11 02:56:05.970176016 +0000 UTC m=+576.903263670" lastFinishedPulling="2025-10-11 02:56:09.220482194 +0000 UTC m=+580.153569838" observedRunningTime="2025-10-11 02:56:09.621817214 +0000 UTC m=+580.554904868" watchObservedRunningTime="2025-10-11 02:56:09.623908438 +0000 UTC m=+580.556996092"
Oct 11 02:56:09 crc kubenswrapper[4953]: I1011 02:56:09.643883 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r" podStartSLOduration=1.278012465 podStartE2EDuration="4.643867791s" podCreationTimestamp="2025-10-11 02:56:05 +0000 UTC" firstStartedPulling="2025-10-11 02:56:05.854540626 +0000 UTC m=+576.787628270" lastFinishedPulling="2025-10-11 02:56:09.220395952 +0000 UTC m=+580.153483596" observedRunningTime="2025-10-11 02:56:09.640365871 +0000 UTC m=+580.573453515" watchObservedRunningTime="2025-10-11 02:56:09.643867791 +0000 UTC m=+580.576955435"
Oct 11 02:56:11 crc kubenswrapper[4953]: I1011 02:56:11.316102 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 02:56:11 crc kubenswrapper[4953]: I1011 02:56:11.316501 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.479098 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-zgb5r"
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.806529 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7277g"]
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807000 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-controller" containerID="cri-o://0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807357 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="sbdb" containerID="cri-o://6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807412 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="nbdb" containerID="cri-o://dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807453 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="northd" containerID="cri-o://a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807493 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807533 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-node" containerID="cri-o://25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.807575 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-acl-logging" containerID="cri-o://03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" gracePeriod=30
Oct 11 02:56:15 crc kubenswrapper[4953]: I1011 02:56:15.866666 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" containerID="cri-o://7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" gracePeriod=30
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.099952 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/3.log"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.105858 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovn-acl-logging/0.log"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.106661 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovn-controller/0.log"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.107533 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7277g"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172340 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f66j4"]
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172572 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172587 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172620 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kubecfg-setup"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172631 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kubecfg-setup"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172643 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172651 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172661 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-ovn-metrics"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172670 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-ovn-metrics"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172682 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="northd"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172690 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="northd"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172697 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="sbdb"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172705 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="sbdb"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172718 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172726 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller"
Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172739 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="nbdb"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172748 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="nbdb"
CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="nbdb" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172758 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172767 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172781 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172789 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172802 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-acl-logging" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172811 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-acl-logging" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172821 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-node" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172828 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-node" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.172841 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172849 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172971 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172987 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovn-acl-logging" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.172998 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-ovn-metrics" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173006 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173014 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173024 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="northd" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173032 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc 
kubenswrapper[4953]: I1011 02:56:16.173042 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="kube-rbac-proxy-node" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173052 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="nbdb" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173063 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="sbdb" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173264 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.173277 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerName="ovnkube-controller" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.174994 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200658 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200739 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200761 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200784 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200809 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200829 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmskm\" (UniqueName: \"kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200873 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200895 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200910 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200899 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200929 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200968 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.200998 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201017 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201034 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201049 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: 
I1011 02:56:16.201067 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201085 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201101 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201115 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201138 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch\") pod \"f390367a-136d-4992-a5a8-75d12ae2a94a\" (UID: \"f390367a-136d-4992-a5a8-75d12ae2a94a\") " Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201310 4953 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201358 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201679 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201781 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201827 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log" (OuterVolumeSpecName: "node-log") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201867 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket" (OuterVolumeSpecName: "log-socket") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201905 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.201946 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202502 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202578 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202632 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash" (OuterVolumeSpecName: "host-slash") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202660 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202723 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.202792 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.203283 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.203531 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.203576 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.212497 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm" (OuterVolumeSpecName: "kube-api-access-dmskm") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "kube-api-access-dmskm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.212632 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.217932 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f390367a-136d-4992-a5a8-75d12ae2a94a" (UID: "f390367a-136d-4992-a5a8-75d12ae2a94a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302581 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-etc-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302639 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-config\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302662 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-env-overrides\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302682 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-node-log\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302703 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovn-node-metrics-cert\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302724 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302739 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-netd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302797 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-ovn\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302813 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfbp9\" (UniqueName: 
\"kubernetes.io/projected/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-kube-api-access-bfbp9\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302830 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302853 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-log-socket\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.302955 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-slash\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303043 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303080 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-kubelet\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303126 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-systemd-units\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303158 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-netns\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303211 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-systemd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303288 4953 
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303379 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-script-lib\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303425 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-bin\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303519 4953 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-env-overrides\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303545 4953 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303572 4953 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-systemd-units\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303598 4953 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-slash\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303653 4953 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303676 4953 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303694 4953 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-run-netns\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303714 4953 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303731 4953 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303752 4953 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f390367a-136d-4992-a5a8-75d12ae2a94a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303771 4953 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-run-systemd\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303789 4953 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-bin\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303807 4953 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-kubelet\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303826 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmskm\" (UniqueName: \"kubernetes.io/projected/f390367a-136d-4992-a5a8-75d12ae2a94a-kube-api-access-dmskm\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303845 4953 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f390367a-136d-4992-a5a8-75d12ae2a94a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303863 4953 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-node-log\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303880 4953 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-log-socket\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303898 4953 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-host-cni-netd\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.303915 4953 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f390367a-136d-4992-a5a8-75d12ae2a94a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405291 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-kubelet\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405335 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-systemd-units\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4"
pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405350 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-netns\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405370 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-systemd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405394 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-var-lib-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405421 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-script-lib\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405439 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-bin\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405457 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-etc-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405471 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-config\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405517 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-env-overrides\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405534 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-node-log\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 
02:56:16.405552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovn-node-metrics-cert\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405567 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405581 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-netd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405622 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-ovn\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405637 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfbp9\" (UniqueName: \"kubernetes.io/projected/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-kube-api-access-bfbp9\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405652 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405673 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-slash\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405687 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-log-socket\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405702 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405771 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405811 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-kubelet\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405834 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-systemd-units\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405944 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-node-log\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405971 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-systemd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405990 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-ovn\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405993 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-var-lib-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406107 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-etc-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406164 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-run-openvswitch\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406135 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-ovn-kubernetes\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406175 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-bin\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406216 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-cni-netd\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406258 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-log-socket\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406273 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-slash\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406779 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-config\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406816 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-env-overrides\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.406997 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovnkube-script-lib\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.405854 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-host-run-netns\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.409729 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-ovn-node-metrics-cert\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.422006 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfbp9\" (UniqueName: \"kubernetes.io/projected/838b5d92-bfb1-4c14-8f1e-b46f1496dba2-kube-api-access-bfbp9\") pod \"ovnkube-node-f66j4\" (UID: \"838b5d92-bfb1-4c14-8f1e-b46f1496dba2\") " pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.491117 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.595505 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"0fea4d9fd7c3ca8612abd96ca8c8c9e71045380a60f24d035bead26d16447845"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.597993 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/2.log" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.598746 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/1.log" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.598839 4953 generic.go:334] "Generic (PLEG): container finished" podID="5a114089-658e-442c-b755-9ca9b127f368" containerID="8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313" exitCode=2 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.598953 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerDied","Data":"8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.599063 4953 scope.go:117] "RemoveContainer" containerID="c8110302b5fad67513134cb198803343b687c51b3a161705a1d1bb5a00f5ed87" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.599741 4953 scope.go:117] "RemoveContainer" containerID="8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.600057 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-t8zfg_openshift-multus(5a114089-658e-442c-b755-9ca9b127f368)\"" pod="openshift-multus/multus-t8zfg" podUID="5a114089-658e-442c-b755-9ca9b127f368" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.603669 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovnkube-controller/3.log" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.608788 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovn-acl-logging/0.log" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.609728 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-7277g_f390367a-136d-4992-a5a8-75d12ae2a94a/ovn-controller/0.log" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610506 4953 generic.go:334] "Generic (PLEG): container finished" 
podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" exitCode=0 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610548 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" exitCode=0 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610571 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" exitCode=0 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610586 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" exitCode=0 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610635 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" exitCode=0 Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610589 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610699 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610724 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610745 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610748 4953 util.go:48] "No ready sandbox for pod can be found. 
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610748 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7277g"
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610764 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.611797 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612012 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612183 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612346 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612497 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612677 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.612854 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613007 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613156 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613312 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613524 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.610654 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" exitCode=0
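
The repeated "Failed to issue the request to remove container" entries above, and the NotFound replies to RemoveContainer further down, are the benign face of a race: the kubelet's cleanup runs while CRI-O is already tearing the same containers down, so remove requests can target containers that no longer exist. The usual pattern is to treat NotFound as "already removed". A minimal sketch, with errNotFound standing in for the CRI's rpc NotFound error (assumed here, not kubelet source):

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("rpc error: code = NotFound")

    // removeContainer treats a NotFound answer as success, since the goal
    // state (container gone) already holds.
    func removeContainer(id string, remove func(string) error) error {
        if err := remove(id); err != nil {
            if errors.Is(err, errNotFound) {
                fmt.Printf("container %s already gone, treating as removed\n", id)
                return nil
            }
            return err
        }
        return nil
    }

    func main() {
        _ = removeContainer("7df0702f", func(string) error { return errNotFound })
    }
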
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613879 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" exitCode=143
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.614037 4953 generic.go:334] "Generic (PLEG): container finished" podID="f390367a-136d-4992-a5a8-75d12ae2a94a" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" exitCode=143
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.613962 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.614367 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.614527 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615151 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615298 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615431 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615564 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615757 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.615924 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.616078 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.616228 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"}
Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.616585 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} Oct 11
02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.616818 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.616965 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617112 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617321 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617511 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617686 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617827 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.617960 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.618174 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.618526 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.618742 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7277g" event={"ID":"f390367a-136d-4992-a5a8-75d12ae2a94a","Type":"ContainerDied","Data":"fea333d76f7d5ff44f08fa16588a22989c4784b3693020aa652fe16748b1fd50"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.618915 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619058 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619203 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} Oct 11 
02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619348 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619481 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619804 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.619957 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.620096 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.620232 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.620347 4953 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.644469 4953 scope.go:117] "RemoveContainer" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.665428 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.668735 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7277g"] Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.673968 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7277g"] Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.745773 4953 scope.go:117] "RemoveContainer" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.775680 4953 scope.go:117] "RemoveContainer" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.787261 4953 scope.go:117] "RemoveContainer" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.799320 4953 scope.go:117] "RemoveContainer" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.811194 4953 scope.go:117] "RemoveContainer" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.822735 4953 scope.go:117] "RemoveContainer" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 
02:56:16.833623 4953 scope.go:117] "RemoveContainer" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.845805 4953 scope.go:117] "RemoveContainer" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.855797 4953 scope.go:117] "RemoveContainer" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.856043 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": container with ID starting with 7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41 not found: ID does not exist" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856081 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} err="failed to get container status \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": rpc error: code = NotFound desc = could not find container \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": container with ID starting with 7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856107 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.856471 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": container with ID starting with d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036 not found: ID does not exist" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856499 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} err="failed to get container status \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": rpc error: code = NotFound desc = could not find container \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": container with ID starting with d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856527 4953 scope.go:117] "RemoveContainer" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.856813 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": container with ID starting with 6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85 not found: ID does not exist" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856841 4953 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} err="failed to get container status \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": rpc error: code = NotFound desc = could not find container \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": container with ID starting with 6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.856861 4953 scope.go:117] "RemoveContainer" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.857195 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": container with ID starting with dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d not found: ID does not exist" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.857235 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} err="failed to get container status \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": rpc error: code = NotFound desc = could not find container \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": container with ID starting with dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.857293 4953 scope.go:117] "RemoveContainer" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.857731 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": container with ID starting with a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469 not found: ID does not exist" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.857758 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} err="failed to get container status \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": rpc error: code = NotFound desc = could not find container \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": container with ID starting with a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.857779 4953 scope.go:117] "RemoveContainer" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.858192 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": container with ID starting with 527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a not found: ID does not exist" 
containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858212 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} err="failed to get container status \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": rpc error: code = NotFound desc = could not find container \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": container with ID starting with 527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858224 4953 scope.go:117] "RemoveContainer" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.858454 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": container with ID starting with 25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a not found: ID does not exist" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858484 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} err="failed to get container status \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": rpc error: code = NotFound desc = could not find container \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": container with ID starting with 25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858504 4953 scope.go:117] "RemoveContainer" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.858700 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": container with ID starting with 03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe not found: ID does not exist" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858720 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} err="failed to get container status \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": rpc error: code = NotFound desc = could not find container \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": container with ID starting with 03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858733 4953 scope.go:117] "RemoveContainer" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.858920 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": container with ID starting with 0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4 not found: ID does not exist" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858948 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} err="failed to get container status \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": rpc error: code = NotFound desc = could not find container \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": container with ID starting with 0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.858966 4953 scope.go:117] "RemoveContainer" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: E1011 02:56:16.859225 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": container with ID starting with 374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8 not found: ID does not exist" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859251 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} err="failed to get container status \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": rpc error: code = NotFound desc = could not find container \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": container with ID starting with 374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859266 4953 scope.go:117] "RemoveContainer" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859557 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} err="failed to get container status \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": rpc error: code = NotFound desc = could not find container \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": container with ID starting with 7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859579 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859812 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} err="failed to get container status \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": rpc error: code = NotFound desc = could not find container \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": container with ID starting with 
d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.859835 4953 scope.go:117] "RemoveContainer" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860063 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} err="failed to get container status \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": rpc error: code = NotFound desc = could not find container \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": container with ID starting with 6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860094 4953 scope.go:117] "RemoveContainer" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860317 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} err="failed to get container status \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": rpc error: code = NotFound desc = could not find container \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": container with ID starting with dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860337 4953 scope.go:117] "RemoveContainer" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860513 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} err="failed to get container status \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": rpc error: code = NotFound desc = could not find container \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": container with ID starting with a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860537 4953 scope.go:117] "RemoveContainer" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860749 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} err="failed to get container status \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": rpc error: code = NotFound desc = could not find container \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": container with ID starting with 527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860766 4953 scope.go:117] "RemoveContainer" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.860996 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} err="failed to get container status \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": rpc error: code = NotFound desc = could not find container \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": container with ID starting with 25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.861017 4953 scope.go:117] "RemoveContainer" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.861253 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} err="failed to get container status \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": rpc error: code = NotFound desc = could not find container \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": container with ID starting with 03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.861385 4953 scope.go:117] "RemoveContainer" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862029 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} err="failed to get container status \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": rpc error: code = NotFound desc = could not find container \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": container with ID starting with 0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862051 4953 scope.go:117] "RemoveContainer" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862271 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} err="failed to get container status \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": rpc error: code = NotFound desc = could not find container \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": container with ID starting with 374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862294 4953 scope.go:117] "RemoveContainer" containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862531 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} err="failed to get container status \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": rpc error: code = NotFound desc = could not find container \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": container with ID starting with 7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41 not found: ID does not exist" Oct 
11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862560 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862865 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} err="failed to get container status \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": rpc error: code = NotFound desc = could not find container \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": container with ID starting with d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.862888 4953 scope.go:117] "RemoveContainer" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863130 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} err="failed to get container status \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": rpc error: code = NotFound desc = could not find container \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": container with ID starting with 6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863150 4953 scope.go:117] "RemoveContainer" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863366 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} err="failed to get container status \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": rpc error: code = NotFound desc = could not find container \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": container with ID starting with dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863406 4953 scope.go:117] "RemoveContainer" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863638 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} err="failed to get container status \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": rpc error: code = NotFound desc = could not find container \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": container with ID starting with a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863655 4953 scope.go:117] "RemoveContainer" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863863 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} err="failed to get container status 
\"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": rpc error: code = NotFound desc = could not find container \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": container with ID starting with 527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.863890 4953 scope.go:117] "RemoveContainer" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864333 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} err="failed to get container status \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": rpc error: code = NotFound desc = could not find container \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": container with ID starting with 25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864356 4953 scope.go:117] "RemoveContainer" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864596 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} err="failed to get container status \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": rpc error: code = NotFound desc = could not find container \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": container with ID starting with 03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864628 4953 scope.go:117] "RemoveContainer" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864824 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} err="failed to get container status \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": rpc error: code = NotFound desc = could not find container \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": container with ID starting with 0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.864847 4953 scope.go:117] "RemoveContainer" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865073 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} err="failed to get container status \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": rpc error: code = NotFound desc = could not find container \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": container with ID starting with 374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865107 4953 scope.go:117] "RemoveContainer" 
containerID="7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865404 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41"} err="failed to get container status \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": rpc error: code = NotFound desc = could not find container \"7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41\": container with ID starting with 7df0702f05e1fc4bcf36695eb2f12040fdd90baabb0f2949c90e401d020ebe41 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865423 4953 scope.go:117] "RemoveContainer" containerID="d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865634 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036"} err="failed to get container status \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": rpc error: code = NotFound desc = could not find container \"d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036\": container with ID starting with d95fb001db684d496f424d51b2e795f4b173a4443e88d2a9ad7b86b63489e036 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865654 4953 scope.go:117] "RemoveContainer" containerID="6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865829 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85"} err="failed to get container status \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": rpc error: code = NotFound desc = could not find container \"6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85\": container with ID starting with 6d38ed7c436cb3de8a17348be9f64c568315a7c8d2163316c6de2a0e215cdc85 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.865847 4953 scope.go:117] "RemoveContainer" containerID="dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866125 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d"} err="failed to get container status \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": rpc error: code = NotFound desc = could not find container \"dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d\": container with ID starting with dec3b71f3ebd25e1a0feb5f87cba7590caa98c3a977f9055c07f597cc9956e7d not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866151 4953 scope.go:117] "RemoveContainer" containerID="a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866432 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469"} err="failed to get container status \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": rpc error: code = NotFound desc = could not find 
container \"a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469\": container with ID starting with a8650d13c69901990ec76ce602480d83d2f017e951110b26c050074dfa43d469 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866513 4953 scope.go:117] "RemoveContainer" containerID="527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866862 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a"} err="failed to get container status \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": rpc error: code = NotFound desc = could not find container \"527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a\": container with ID starting with 527d1e183067a732b2dde2e8edfddf2fa4d44d39ea70b967df429bac22dfe86a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.866882 4953 scope.go:117] "RemoveContainer" containerID="25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867143 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a"} err="failed to get container status \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": rpc error: code = NotFound desc = could not find container \"25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a\": container with ID starting with 25e300328df12004ca51e5011a98462a79f43ef382d4a2310c9f8f2ea9dbd74a not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867198 4953 scope.go:117] "RemoveContainer" containerID="03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867519 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe"} err="failed to get container status \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": rpc error: code = NotFound desc = could not find container \"03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe\": container with ID starting with 03eb75610a62cfcab91bc9a76e9d2cd3246d4ec0d0479b881ea11b7b985a6cfe not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867546 4953 scope.go:117] "RemoveContainer" containerID="0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867757 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4"} err="failed to get container status \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": rpc error: code = NotFound desc = could not find container \"0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4\": container with ID starting with 0c49761125b74fce41d757de607c104217b67342069e2cfe306560eb7f4170d4 not found: ID does not exist" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.867777 4953 scope.go:117] "RemoveContainer" containerID="374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8" Oct 11 02:56:16 crc kubenswrapper[4953]: I1011 02:56:16.868054 4953 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8"} err="failed to get container status \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": rpc error: code = NotFound desc = could not find container \"374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8\": container with ID starting with 374a6409227451ac30323891a631ec757d1e796ebfa8e88cdd9b4141055570d8 not found: ID does not exist" Oct 11 02:56:17 crc kubenswrapper[4953]: I1011 02:56:17.623500 4953 generic.go:334] "Generic (PLEG): container finished" podID="838b5d92-bfb1-4c14-8f1e-b46f1496dba2" containerID="f3da6c2c534d6f367c88c2b8b8ad2ab68310425c688b3c80a1a8d89885b5d9dd" exitCode=0 Oct 11 02:56:17 crc kubenswrapper[4953]: I1011 02:56:17.623679 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerDied","Data":"f3da6c2c534d6f367c88c2b8b8ad2ab68310425c688b3c80a1a8d89885b5d9dd"} Oct 11 02:56:17 crc kubenswrapper[4953]: I1011 02:56:17.627478 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/2.log" Oct 11 02:56:17 crc kubenswrapper[4953]: I1011 02:56:17.805676 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f390367a-136d-4992-a5a8-75d12ae2a94a" path="/var/lib/kubelet/pods/f390367a-136d-4992-a5a8-75d12ae2a94a/volumes" Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640138 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"668e7481f5ff7b1ce2d0cc26037414e3b143a5c51092c4d6d27e16ebad51bb34"} Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640564 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"a6754d9d8a0690cebab880c0f12d2d789565c700bb61fdef506a679f87ac845d"} Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640581 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"5975ab65baec8b696ead9818095632fbeecaee1d16a4a0fa2b0149063451d53f"} Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640597 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"b8e5ff0fc657052a71f7fb49c2b577ecd16a006f3bfc1f7302aa5be18bedd105"} Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640642 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"c7e2e7e8ef89de5e319294b9236d8a54c525c0804397604fa17e7a9ba5b81ac2"} Oct 11 02:56:18 crc kubenswrapper[4953]: I1011 02:56:18.640656 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"4ac0ec09fde4c12448b7294f0efff2930159b6c188fb888c34b641f3813480f3"} Oct 11 02:56:20 crc kubenswrapper[4953]: I1011 02:56:20.652092 4953 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"4f4f89a63d6981021dbce515d3c1a959d00f98e54c04d448c7ff2803a1347d3b"} Oct 11 02:56:23 crc kubenswrapper[4953]: I1011 02:56:23.678476 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" event={"ID":"838b5d92-bfb1-4c14-8f1e-b46f1496dba2","Type":"ContainerStarted","Data":"6fcc85e08d70e4320c512074c17ab9d2083f40f342a78797e4da51a8af865d0c"} Oct 11 02:56:23 crc kubenswrapper[4953]: I1011 02:56:23.679108 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:23 crc kubenswrapper[4953]: I1011 02:56:23.711731 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:23 crc kubenswrapper[4953]: I1011 02:56:23.716667 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" podStartSLOduration=7.716645258 podStartE2EDuration="7.716645258s" podCreationTimestamp="2025-10-11 02:56:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:56:23.712308757 +0000 UTC m=+594.645396421" watchObservedRunningTime="2025-10-11 02:56:23.716645258 +0000 UTC m=+594.649732912" Oct 11 02:56:24 crc kubenswrapper[4953]: I1011 02:56:24.685867 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:24 crc kubenswrapper[4953]: I1011 02:56:24.685951 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:24 crc kubenswrapper[4953]: I1011 02:56:24.787857 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:29 crc kubenswrapper[4953]: I1011 02:56:29.801935 4953 scope.go:117] "RemoveContainer" containerID="8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313" Oct 11 02:56:29 crc kubenswrapper[4953]: E1011 02:56:29.802892 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-t8zfg_openshift-multus(5a114089-658e-442c-b755-9ca9b127f368)\"" pod="openshift-multus/multus-t8zfg" podUID="5a114089-658e-442c-b755-9ca9b127f368" Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.317038 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.317861 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.317955 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.319071 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.319219 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a" gracePeriod=600 Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.806530 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a" exitCode=0 Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.806653 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a"} Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.807178 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45"} Oct 11 02:56:41 crc kubenswrapper[4953]: I1011 02:56:41.807209 4953 scope.go:117] "RemoveContainer" containerID="be926df4e6651562f1bd344a9c696ee29b5a5b7818c04fe489917ca678fb6665" Oct 11 02:56:42 crc kubenswrapper[4953]: I1011 02:56:42.795722 4953 scope.go:117] "RemoveContainer" containerID="8c53004c81306673421879569a8658cdf8f9544e30d13f1f6d2877014bd16313" Oct 11 02:56:43 crc kubenswrapper[4953]: I1011 02:56:43.827290 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t8zfg_5a114089-658e-442c-b755-9ca9b127f368/kube-multus/2.log" Oct 11 02:56:43 crc kubenswrapper[4953]: I1011 02:56:43.827738 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t8zfg" event={"ID":"5a114089-658e-442c-b755-9ca9b127f368","Type":"ContainerStarted","Data":"67acecc228186f2d8a23c617c96afeab02991c89e11c21080d4f2d5111339cbe"} Oct 11 02:56:46 crc kubenswrapper[4953]: I1011 02:56:46.526747 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f66j4" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.069165 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l"] Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.070852 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.073007 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.089417 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l"] Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.160318 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.160419 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.160462 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b2lv\" (UniqueName: \"kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.261743 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.261878 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.261936 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b2lv\" (UniqueName: \"kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.263290 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.263324 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.286823 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b2lv\" (UniqueName: \"kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.399490 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.654072 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l"] Oct 11 02:56:56 crc kubenswrapper[4953]: W1011 02:56:56.663841 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb734ba5e_4396_4913_9cb3_fe335edce58f.slice/crio-a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1 WatchSource:0}: Error finding container a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1: Status 404 returned error can't find the container with id a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1 Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.906004 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerStarted","Data":"a3d5c75dc44c167a22076b620cb793ec08c0afe96b286db71b747011cabed285"} Oct 11 02:56:56 crc kubenswrapper[4953]: I1011 02:56:56.906044 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerStarted","Data":"a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1"} Oct 11 02:56:57 crc kubenswrapper[4953]: I1011 02:56:57.920347 4953 generic.go:334] "Generic (PLEG): container finished" podID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerID="a3d5c75dc44c167a22076b620cb793ec08c0afe96b286db71b747011cabed285" exitCode=0 Oct 11 02:56:57 crc kubenswrapper[4953]: I1011 02:56:57.920621 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerDied","Data":"a3d5c75dc44c167a22076b620cb793ec08c0afe96b286db71b747011cabed285"} Oct 11 02:56:59 crc 
kubenswrapper[4953]: I1011 02:56:59.935865 4953 generic.go:334] "Generic (PLEG): container finished" podID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerID="ab95128f858eca793e9397aa98ef1228ad3c02839d7c6cfbc8d3e13d513d54a3" exitCode=0 Oct 11 02:56:59 crc kubenswrapper[4953]: I1011 02:56:59.935990 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerDied","Data":"ab95128f858eca793e9397aa98ef1228ad3c02839d7c6cfbc8d3e13d513d54a3"} Oct 11 02:57:00 crc kubenswrapper[4953]: I1011 02:57:00.945813 4953 generic.go:334] "Generic (PLEG): container finished" podID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerID="e12c6919d35b5e6354ac5ff6c8699c0dd446874f57f88541442de404a0cef954" exitCode=0 Oct 11 02:57:00 crc kubenswrapper[4953]: I1011 02:57:00.945878 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerDied","Data":"e12c6919d35b5e6354ac5ff6c8699c0dd446874f57f88541442de404a0cef954"} Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.233357 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.356965 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util\") pod \"b734ba5e-4396-4913-9cb3-fe335edce58f\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.357145 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle\") pod \"b734ba5e-4396-4913-9cb3-fe335edce58f\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.357249 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b2lv\" (UniqueName: \"kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv\") pod \"b734ba5e-4396-4913-9cb3-fe335edce58f\" (UID: \"b734ba5e-4396-4913-9cb3-fe335edce58f\") " Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.358293 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle" (OuterVolumeSpecName: "bundle") pod "b734ba5e-4396-4913-9cb3-fe335edce58f" (UID: "b734ba5e-4396-4913-9cb3-fe335edce58f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.362097 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv" (OuterVolumeSpecName: "kube-api-access-2b2lv") pod "b734ba5e-4396-4913-9cb3-fe335edce58f" (UID: "b734ba5e-4396-4913-9cb3-fe335edce58f"). InnerVolumeSpecName "kube-api-access-2b2lv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.458743 4953 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.458801 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b2lv\" (UniqueName: \"kubernetes.io/projected/b734ba5e-4396-4913-9cb3-fe335edce58f-kube-api-access-2b2lv\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.620796 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util" (OuterVolumeSpecName: "util") pod "b734ba5e-4396-4913-9cb3-fe335edce58f" (UID: "b734ba5e-4396-4913-9cb3-fe335edce58f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.661333 4953 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b734ba5e-4396-4913-9cb3-fe335edce58f-util\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.969308 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" event={"ID":"b734ba5e-4396-4913-9cb3-fe335edce58f","Type":"ContainerDied","Data":"a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1"} Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.969367 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6ab08102bf009699658017ebcf9d0c8b5e62c2daeb18889ceaf09d0d76e37d1" Oct 11 02:57:02 crc kubenswrapper[4953]: I1011 02:57:02.969443 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.668937 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v"] Oct 11 02:57:04 crc kubenswrapper[4953]: E1011 02:57:04.669368 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="extract" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.669379 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="extract" Oct 11 02:57:04 crc kubenswrapper[4953]: E1011 02:57:04.669393 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="util" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.669399 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="util" Oct 11 02:57:04 crc kubenswrapper[4953]: E1011 02:57:04.669410 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="pull" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.669416 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="pull" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.669513 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b734ba5e-4396-4913-9cb3-fe335edce58f" containerName="extract" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.669861 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.671973 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.672036 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.672231 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-cg7j9" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.685075 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v"] Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.790431 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjfqt\" (UniqueName: \"kubernetes.io/projected/37deec98-39ac-4e7d-80dd-5dd431108ea8-kube-api-access-qjfqt\") pod \"nmstate-operator-858ddd8f98-b5h2v\" (UID: \"37deec98-39ac-4e7d-80dd-5dd431108ea8\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.892056 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjfqt\" (UniqueName: \"kubernetes.io/projected/37deec98-39ac-4e7d-80dd-5dd431108ea8-kube-api-access-qjfqt\") pod \"nmstate-operator-858ddd8f98-b5h2v\" (UID: \"37deec98-39ac-4e7d-80dd-5dd431108ea8\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.913412 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjfqt\" 
(UniqueName: \"kubernetes.io/projected/37deec98-39ac-4e7d-80dd-5dd431108ea8-kube-api-access-qjfqt\") pod \"nmstate-operator-858ddd8f98-b5h2v\" (UID: \"37deec98-39ac-4e7d-80dd-5dd431108ea8\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" Oct 11 02:57:04 crc kubenswrapper[4953]: I1011 02:57:04.985348 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" Oct 11 02:57:05 crc kubenswrapper[4953]: I1011 02:57:05.396067 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v"] Oct 11 02:57:05 crc kubenswrapper[4953]: I1011 02:57:05.988986 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" event={"ID":"37deec98-39ac-4e7d-80dd-5dd431108ea8","Type":"ContainerStarted","Data":"9d60a6607f0cf86dddf25254f35d4a306b860137414ba0654a6938c35fe57ecc"} Oct 11 02:57:08 crc kubenswrapper[4953]: I1011 02:57:08.003543 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" event={"ID":"37deec98-39ac-4e7d-80dd-5dd431108ea8","Type":"ContainerStarted","Data":"ff2cf1b88a43b96695130e742c6397f2954e95fbb54e92889a1e7da1d3e9b73d"} Oct 11 02:57:08 crc kubenswrapper[4953]: I1011 02:57:08.020778 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-b5h2v" podStartSLOduration=2.041893247 podStartE2EDuration="4.020753161s" podCreationTimestamp="2025-10-11 02:57:04 +0000 UTC" firstStartedPulling="2025-10-11 02:57:05.409522386 +0000 UTC m=+636.342610040" lastFinishedPulling="2025-10-11 02:57:07.38838231 +0000 UTC m=+638.321469954" observedRunningTime="2025-10-11 02:57:08.020669928 +0000 UTC m=+638.953757612" watchObservedRunningTime="2025-10-11 02:57:08.020753161 +0000 UTC m=+638.953840835" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.027825 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.029637 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.031679 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-x7c8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.042506 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.059433 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-blssq"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.066244 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.079893 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.081164 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.086887 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.096348 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.153452 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-ovs-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.153503 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zccv5\" (UniqueName: \"kubernetes.io/projected/61d0daba-b765-4ae3-83e7-68cb0c06e759-kube-api-access-zccv5\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.153540 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-nmstate-lock\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.153597 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh8tn\" (UniqueName: \"kubernetes.io/projected/588d5eb1-6c32-47f6-a332-6d07684c7381-kube-api-access-fh8tn\") pod \"nmstate-metrics-fdff9cb8d-n6n9m\" (UID: \"588d5eb1-6c32-47f6-a332-6d07684c7381\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.153636 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-dbus-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.162502 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.163299 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.168362 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.168485 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-r8wcs" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.168621 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.172161 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.254849 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.254926 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh8tn\" (UniqueName: \"kubernetes.io/projected/588d5eb1-6c32-47f6-a332-6d07684c7381-kube-api-access-fh8tn\") pod \"nmstate-metrics-fdff9cb8d-n6n9m\" (UID: \"588d5eb1-6c32-47f6-a332-6d07684c7381\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.254954 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.254988 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-dbus-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255018 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg7ch\" (UniqueName: \"kubernetes.io/projected/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-kube-api-access-fg7ch\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255048 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-ovs-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255070 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnwj9\" (UniqueName: \"kubernetes.io/projected/017885e6-ffff-4db6-85eb-bc0fb03170ac-kube-api-access-lnwj9\") pod 
\"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255096 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zccv5\" (UniqueName: \"kubernetes.io/projected/61d0daba-b765-4ae3-83e7-68cb0c06e759-kube-api-access-zccv5\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255127 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255158 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-nmstate-lock\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255247 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-nmstate-lock\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255811 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-dbus-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.255826 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/61d0daba-b765-4ae3-83e7-68cb0c06e759-ovs-socket\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.275915 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zccv5\" (UniqueName: \"kubernetes.io/projected/61d0daba-b765-4ae3-83e7-68cb0c06e759-kube-api-access-zccv5\") pod \"nmstate-handler-blssq\" (UID: \"61d0daba-b765-4ae3-83e7-68cb0c06e759\") " pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.284842 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh8tn\" (UniqueName: \"kubernetes.io/projected/588d5eb1-6c32-47f6-a332-6d07684c7381-kube-api-access-fh8tn\") pod \"nmstate-metrics-fdff9cb8d-n6n9m\" (UID: \"588d5eb1-6c32-47f6-a332-6d07684c7381\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.352053 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.356146 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.356208 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.356251 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg7ch\" (UniqueName: \"kubernetes.io/projected/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-kube-api-access-fg7ch\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.356282 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnwj9\" (UniqueName: \"kubernetes.io/projected/017885e6-ffff-4db6-85eb-bc0fb03170ac-kube-api-access-lnwj9\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.356317 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: E1011 02:57:09.356981 4953 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Oct 11 02:57:09 crc kubenswrapper[4953]: E1011 02:57:09.357050 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair podName:017885e6-ffff-4db6-85eb-bc0fb03170ac nodeName:}" failed. No retries permitted until 2025-10-11 02:57:09.857029881 +0000 UTC m=+640.790117525 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair") pod "nmstate-webhook-6cdbc54649-8s6t4" (UID: "017885e6-ffff-4db6-85eb-bc0fb03170ac") : secret "openshift-nmstate-webhook" not found Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.358906 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.359475 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.365472 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6d66db4bb7-mrc8f"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.366186 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.379740 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnwj9\" (UniqueName: \"kubernetes.io/projected/017885e6-ffff-4db6-85eb-bc0fb03170ac-kube-api-access-lnwj9\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.385688 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d66db4bb7-mrc8f"] Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.411899 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.450164 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg7ch\" (UniqueName: \"kubernetes.io/projected/32b4d8a6-f845-41f4-808f-2d8ecd8f4b83-kube-api-access-fg7ch\") pod \"nmstate-console-plugin-6b874cbd85-72xj8\" (UID: \"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457140 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457179 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457197 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdknd\" (UniqueName: \"kubernetes.io/projected/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-kube-api-access-vdknd\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457221 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-trusted-ca-bundle\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457240 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-oauth-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457259 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-oauth-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.457330 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-service-ca\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.486945 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558366 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558407 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558429 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdknd\" (UniqueName: \"kubernetes.io/projected/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-kube-api-access-vdknd\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558456 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-trusted-ca-bundle\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558477 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-oauth-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558498 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-oauth-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.558523 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-service-ca\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.559370 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-service-ca\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.559967 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-oauth-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: 
\"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.560055 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-trusted-ca-bundle\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.560254 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.563414 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-oauth-config\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.566037 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-console-serving-cert\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.575512 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdknd\" (UniqueName: \"kubernetes.io/projected/dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7-kube-api-access-vdknd\") pod \"console-6d66db4bb7-mrc8f\" (UID: \"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7\") " pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.605316 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m"] Oct 11 02:57:09 crc kubenswrapper[4953]: W1011 02:57:09.608728 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod588d5eb1_6c32_47f6_a332_6d07684c7381.slice/crio-2e31bf2f771ad44e8e2a13f2bbd2e097ddd0a982df8c9789d0368559c78fba43 WatchSource:0}: Error finding container 2e31bf2f771ad44e8e2a13f2bbd2e097ddd0a982df8c9789d0368559c78fba43: Status 404 returned error can't find the container with id 2e31bf2f771ad44e8e2a13f2bbd2e097ddd0a982df8c9789d0368559c78fba43 Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.764816 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.856740 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8"] Oct 11 02:57:09 crc kubenswrapper[4953]: W1011 02:57:09.862434 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32b4d8a6_f845_41f4_808f_2d8ecd8f4b83.slice/crio-da9129de259f32181251655ce9acdcfea317d73da95fb4a3120dd279110aa8d7 WatchSource:0}: Error finding container da9129de259f32181251655ce9acdcfea317d73da95fb4a3120dd279110aa8d7: Status 404 returned error can't find the container with id da9129de259f32181251655ce9acdcfea317d73da95fb4a3120dd279110aa8d7 Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.862492 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:09 crc kubenswrapper[4953]: I1011 02:57:09.867958 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/017885e6-ffff-4db6-85eb-bc0fb03170ac-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-8s6t4\" (UID: \"017885e6-ffff-4db6-85eb-bc0fb03170ac\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.016527 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" event={"ID":"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83","Type":"ContainerStarted","Data":"da9129de259f32181251655ce9acdcfea317d73da95fb4a3120dd279110aa8d7"} Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.017489 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.017951 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-blssq" event={"ID":"61d0daba-b765-4ae3-83e7-68cb0c06e759","Type":"ContainerStarted","Data":"bc262a7afe2332cbefbcbd9e498d5f229d8e7933d0fad98669e7798e1f8535ee"} Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.018832 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" event={"ID":"588d5eb1-6c32-47f6-a332-6d07684c7381","Type":"ContainerStarted","Data":"2e31bf2f771ad44e8e2a13f2bbd2e097ddd0a982df8c9789d0368559c78fba43"} Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.157731 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d66db4bb7-mrc8f"] Oct 11 02:57:10 crc kubenswrapper[4953]: W1011 02:57:10.158059 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9b5ac9_5d7d_45e4_be8e_8aff21f0a6e7.slice/crio-2b9cf5a380bd549d9948c2022094eb79f2761f485d963697336740383fc8d1c9 WatchSource:0}: Error finding container 2b9cf5a380bd549d9948c2022094eb79f2761f485d963697336740383fc8d1c9: Status 404 returned error can't find the container with id 2b9cf5a380bd549d9948c2022094eb79f2761f485d963697336740383fc8d1c9 Oct 11 02:57:10 crc kubenswrapper[4953]: I1011 02:57:10.418819 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4"] Oct 11 02:57:11 crc kubenswrapper[4953]: I1011 02:57:11.024583 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" event={"ID":"017885e6-ffff-4db6-85eb-bc0fb03170ac","Type":"ContainerStarted","Data":"f7571452a24712467136fa5e0e2ca0f165fa6b5f3f717950efe645b5072dd010"} Oct 11 02:57:11 crc kubenswrapper[4953]: I1011 02:57:11.026421 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d66db4bb7-mrc8f" event={"ID":"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7","Type":"ContainerStarted","Data":"0c4e5f6bb1a288856259eaabfbe92bbb47dd7fc67adece425d1d3b37fd764d51"} Oct 11 02:57:11 crc kubenswrapper[4953]: I1011 02:57:11.026446 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d66db4bb7-mrc8f" event={"ID":"dc9b5ac9-5d7d-45e4-be8e-8aff21f0a6e7","Type":"ContainerStarted","Data":"2b9cf5a380bd549d9948c2022094eb79f2761f485d963697336740383fc8d1c9"} Oct 11 02:57:11 crc kubenswrapper[4953]: I1011 02:57:11.050249 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6d66db4bb7-mrc8f" podStartSLOduration=2.050234608 podStartE2EDuration="2.050234608s" podCreationTimestamp="2025-10-11 02:57:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:57:11.045477016 +0000 UTC m=+641.978564670" watchObservedRunningTime="2025-10-11 02:57:11.050234608 +0000 UTC m=+641.983322242" Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.038323 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-blssq" event={"ID":"61d0daba-b765-4ae3-83e7-68cb0c06e759","Type":"ContainerStarted","Data":"ad6ef054bd06fc6383c7b61ec516612ea9514f3ab159e256db24eb4a778fb778"} Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.043366 4953 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.043408 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" event={"ID":"017885e6-ffff-4db6-85eb-bc0fb03170ac","Type":"ContainerStarted","Data":"c1677b9d856dee87dac661b478b823ee31fdb3404e48ee2cb3c7f1a330f28dd7"} Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.043434 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.043451 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" event={"ID":"588d5eb1-6c32-47f6-a332-6d07684c7381","Type":"ContainerStarted","Data":"582e1731b7e34c43d54d8bf8268341f3ba0664c61b204fec492682ce6b2e8c5b"} Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.058219 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-blssq" podStartSLOduration=0.792525406 podStartE2EDuration="3.058199285s" podCreationTimestamp="2025-10-11 02:57:09 +0000 UTC" firstStartedPulling="2025-10-11 02:57:09.474758455 +0000 UTC m=+640.407846099" lastFinishedPulling="2025-10-11 02:57:11.740432334 +0000 UTC m=+642.673519978" observedRunningTime="2025-10-11 02:57:12.055921117 +0000 UTC m=+642.989008791" watchObservedRunningTime="2025-10-11 02:57:12.058199285 +0000 UTC m=+642.991286929" Oct 11 02:57:12 crc kubenswrapper[4953]: I1011 02:57:12.070508 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" podStartSLOduration=1.772454934 podStartE2EDuration="3.070493811s" podCreationTimestamp="2025-10-11 02:57:09 +0000 UTC" firstStartedPulling="2025-10-11 02:57:10.429547837 +0000 UTC m=+641.362635481" lastFinishedPulling="2025-10-11 02:57:11.727586714 +0000 UTC m=+642.660674358" observedRunningTime="2025-10-11 02:57:12.068762127 +0000 UTC m=+643.001849761" watchObservedRunningTime="2025-10-11 02:57:12.070493811 +0000 UTC m=+643.003581455" Oct 11 02:57:13 crc kubenswrapper[4953]: I1011 02:57:13.052022 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" event={"ID":"32b4d8a6-f845-41f4-808f-2d8ecd8f4b83","Type":"ContainerStarted","Data":"f566fc269bddc56f6dfdf4f8f8600b9ce33724edd190ae37f4b0c577fe4942d1"} Oct 11 02:57:13 crc kubenswrapper[4953]: I1011 02:57:13.069637 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-72xj8" podStartSLOduration=1.278246561 podStartE2EDuration="4.069591931s" podCreationTimestamp="2025-10-11 02:57:09 +0000 UTC" firstStartedPulling="2025-10-11 02:57:09.865725216 +0000 UTC m=+640.798812860" lastFinishedPulling="2025-10-11 02:57:12.657070586 +0000 UTC m=+643.590158230" observedRunningTime="2025-10-11 02:57:13.068861093 +0000 UTC m=+644.001948737" watchObservedRunningTime="2025-10-11 02:57:13.069591931 +0000 UTC m=+644.002679595" Oct 11 02:57:15 crc kubenswrapper[4953]: I1011 02:57:15.067940 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" event={"ID":"588d5eb1-6c32-47f6-a332-6d07684c7381","Type":"ContainerStarted","Data":"3536013d80c6c62c646cefb5876af22a0c2f2442ee6636a73bc97120f1e9e632"} Oct 11 02:57:15 crc kubenswrapper[4953]: I1011 02:57:15.102432 4953 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-n6n9m" podStartSLOduration=1.619041943 podStartE2EDuration="6.102364549s" podCreationTimestamp="2025-10-11 02:57:09 +0000 UTC" firstStartedPulling="2025-10-11 02:57:09.610632444 +0000 UTC m=+640.543720088" lastFinishedPulling="2025-10-11 02:57:14.09395505 +0000 UTC m=+645.027042694" observedRunningTime="2025-10-11 02:57:15.094277741 +0000 UTC m=+646.027365465" watchObservedRunningTime="2025-10-11 02:57:15.102364549 +0000 UTC m=+646.035452233" Oct 11 02:57:19 crc kubenswrapper[4953]: I1011 02:57:19.458957 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-blssq" Oct 11 02:57:19 crc kubenswrapper[4953]: I1011 02:57:19.766131 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:19 crc kubenswrapper[4953]: I1011 02:57:19.766212 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:19 crc kubenswrapper[4953]: I1011 02:57:19.788738 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:20 crc kubenswrapper[4953]: I1011 02:57:20.118136 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6d66db4bb7-mrc8f" Oct 11 02:57:20 crc kubenswrapper[4953]: I1011 02:57:20.183794 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"] Oct 11 02:57:30 crc kubenswrapper[4953]: I1011 02:57:30.024740 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-8s6t4" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.282495 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-cv9v6" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" containerID="cri-o://bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98" gracePeriod=15 Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.676006 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cv9v6_e2235ac4-5a50-4e61-ac95-f9ce54c104c8/console/0.log" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.676352 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744582 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744651 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744708 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744757 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744785 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92wgt\" (UniqueName: \"kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744804 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.744900 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca\") pod \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\" (UID: \"e2235ac4-5a50-4e61-ac95-f9ce54c104c8\") " Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.745450 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.745460 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config" (OuterVolumeSpecName: "console-config") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.745488 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.745867 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca" (OuterVolumeSpecName: "service-ca") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.746391 4953 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-service-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.746414 4953 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.746425 4953 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.746435 4953 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.751502 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt" (OuterVolumeSpecName: "kube-api-access-92wgt") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "kube-api-access-92wgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.751705 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.759973 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "e2235ac4-5a50-4e61-ac95-f9ce54c104c8" (UID: "e2235ac4-5a50-4e61-ac95-f9ce54c104c8"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.847840 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92wgt\" (UniqueName: \"kubernetes.io/projected/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-kube-api-access-92wgt\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.848760 4953 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:45 crc kubenswrapper[4953]: I1011 02:57:45.848777 4953 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e2235ac4-5a50-4e61-ac95-f9ce54c104c8-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.282907 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cv9v6_e2235ac4-5a50-4e61-ac95-f9ce54c104c8/console/0.log" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.282996 4953 generic.go:334] "Generic (PLEG): container finished" podID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerID="bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98" exitCode=2 Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.283043 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cv9v6" event={"ID":"e2235ac4-5a50-4e61-ac95-f9ce54c104c8","Type":"ContainerDied","Data":"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98"} Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.283097 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cv9v6" event={"ID":"e2235ac4-5a50-4e61-ac95-f9ce54c104c8","Type":"ContainerDied","Data":"530d6855488a312a5bc0f238a920a5a7f26b7f5a7427d01f1e75033098cccbf3"} Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.283104 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cv9v6" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.283127 4953 scope.go:117] "RemoveContainer" containerID="bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.308514 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"] Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.309982 4953 scope.go:117] "RemoveContainer" containerID="bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98" Oct 11 02:57:46 crc kubenswrapper[4953]: E1011 02:57:46.310523 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98\": container with ID starting with bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98 not found: ID does not exist" containerID="bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.310574 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98"} err="failed to get container status \"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98\": rpc error: code = NotFound desc = could not find container \"bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98\": container with ID starting with bb3075efa2a92e340177cdfe7bd406976de4dc581640ca0c0b32695d28ff7d98 not found: ID does not exist" Oct 11 02:57:46 crc kubenswrapper[4953]: I1011 02:57:46.313308 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-cv9v6"] Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.227856 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl"] Oct 11 02:57:47 crc kubenswrapper[4953]: E1011 02:57:47.228162 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.228181 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.228707 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" containerName="console" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.229736 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.233038 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.249252 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl"] Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.268316 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.268425 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.268504 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdlf9\" (UniqueName: \"kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.370202 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.370290 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.370378 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdlf9\" (UniqueName: \"kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.371222 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.371317 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.407777 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdlf9\" (UniqueName: \"kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.546261 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.780280 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl"] Oct 11 02:57:47 crc kubenswrapper[4953]: I1011 02:57:47.802028 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2235ac4-5a50-4e61-ac95-f9ce54c104c8" path="/var/lib/kubelet/pods/e2235ac4-5a50-4e61-ac95-f9ce54c104c8/volumes" Oct 11 02:57:48 crc kubenswrapper[4953]: I1011 02:57:48.302548 4953 generic.go:334] "Generic (PLEG): container finished" podID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerID="c17080a214444ebfec72232f5fcb0263988ac6ffca8b8585aab4439f5af1d466" exitCode=0 Oct 11 02:57:48 crc kubenswrapper[4953]: I1011 02:57:48.302644 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" event={"ID":"1150ef11-aab8-4d20-a5de-8f5c6dffc76a","Type":"ContainerDied","Data":"c17080a214444ebfec72232f5fcb0263988ac6ffca8b8585aab4439f5af1d466"} Oct 11 02:57:48 crc kubenswrapper[4953]: I1011 02:57:48.302693 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" event={"ID":"1150ef11-aab8-4d20-a5de-8f5c6dffc76a","Type":"ContainerStarted","Data":"cc11b620b30257084236bdbb95224fe921cbe5934737f5bc993742977a26983e"} Oct 11 02:57:50 crc kubenswrapper[4953]: I1011 02:57:50.317950 4953 generic.go:334] "Generic (PLEG): container finished" podID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerID="287446bf8a4ce8fb0589a55aad09e057ac538a63cacddaff5f9d4072cf574c50" exitCode=0 Oct 11 02:57:50 crc kubenswrapper[4953]: I1011 02:57:50.318030 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" event={"ID":"1150ef11-aab8-4d20-a5de-8f5c6dffc76a","Type":"ContainerDied","Data":"287446bf8a4ce8fb0589a55aad09e057ac538a63cacddaff5f9d4072cf574c50"} Oct 11 02:57:51 crc kubenswrapper[4953]: I1011 02:57:51.331372 
4953 generic.go:334] "Generic (PLEG): container finished" podID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerID="f7d1b71eba0dc08f4c1c8776b2ac81fdb42fd05ba7ca51686e5d7355d41e1496" exitCode=0 Oct 11 02:57:51 crc kubenswrapper[4953]: I1011 02:57:51.331414 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" event={"ID":"1150ef11-aab8-4d20-a5de-8f5c6dffc76a","Type":"ContainerDied","Data":"f7d1b71eba0dc08f4c1c8776b2ac81fdb42fd05ba7ca51686e5d7355d41e1496"} Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.665220 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.756904 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdlf9\" (UniqueName: \"kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9\") pod \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.757250 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util\") pod \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.757314 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle\") pod \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\" (UID: \"1150ef11-aab8-4d20-a5de-8f5c6dffc76a\") " Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.758902 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle" (OuterVolumeSpecName: "bundle") pod "1150ef11-aab8-4d20-a5de-8f5c6dffc76a" (UID: "1150ef11-aab8-4d20-a5de-8f5c6dffc76a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.766900 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9" (OuterVolumeSpecName: "kube-api-access-zdlf9") pod "1150ef11-aab8-4d20-a5de-8f5c6dffc76a" (UID: "1150ef11-aab8-4d20-a5de-8f5c6dffc76a"). InnerVolumeSpecName "kube-api-access-zdlf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.770765 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util" (OuterVolumeSpecName: "util") pod "1150ef11-aab8-4d20-a5de-8f5c6dffc76a" (UID: "1150ef11-aab8-4d20-a5de-8f5c6dffc76a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.858478 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdlf9\" (UniqueName: \"kubernetes.io/projected/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-kube-api-access-zdlf9\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.858539 4953 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-util\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:52 crc kubenswrapper[4953]: I1011 02:57:52.858553 4953 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1150ef11-aab8-4d20-a5de-8f5c6dffc76a-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:57:53 crc kubenswrapper[4953]: I1011 02:57:53.349953 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" event={"ID":"1150ef11-aab8-4d20-a5de-8f5c6dffc76a","Type":"ContainerDied","Data":"cc11b620b30257084236bdbb95224fe921cbe5934737f5bc993742977a26983e"} Oct 11 02:57:53 crc kubenswrapper[4953]: I1011 02:57:53.350017 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl" Oct 11 02:57:53 crc kubenswrapper[4953]: I1011 02:57:53.350035 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc11b620b30257084236bdbb95224fe921cbe5934737f5bc993742977a26983e" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.887864 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz"] Oct 11 02:58:00 crc kubenswrapper[4953]: E1011 02:58:00.888579 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="pull" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.888590 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="pull" Oct 11 02:58:00 crc kubenswrapper[4953]: E1011 02:58:00.888617 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="util" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.888624 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="util" Oct 11 02:58:00 crc kubenswrapper[4953]: E1011 02:58:00.888633 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="extract" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.888639 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="extract" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.888749 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1150ef11-aab8-4d20-a5de-8f5c6dffc76a" containerName="extract" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.889085 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.890643 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.891208 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.891282 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.891466 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.891636 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-wz52g" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.910152 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz"] Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.961237 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8h88\" (UniqueName: \"kubernetes.io/projected/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-kube-api-access-g8h88\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.961296 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-webhook-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:00 crc kubenswrapper[4953]: I1011 02:58:00.961365 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-apiservice-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.062141 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-apiservice-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.062211 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8h88\" (UniqueName: \"kubernetes.io/projected/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-kube-api-access-g8h88\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.062229 
4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-webhook-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.067943 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-webhook-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.082480 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-apiservice-cert\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.083532 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8h88\" (UniqueName: \"kubernetes.io/projected/456dd569-e9b3-4d3f-b5cd-524cc59e5c3c-kube-api-access-g8h88\") pod \"metallb-operator-controller-manager-5b44f7b76f-hxtgz\" (UID: \"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c\") " pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.206804 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.218251 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n"] Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.219100 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.221888 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.222382 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-92zn5" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.232011 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.246926 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n"] Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.265049 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-webhook-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.265099 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbgl2\" (UniqueName: \"kubernetes.io/projected/9905d9f2-d2da-44b2-92c6-fbf3420250ae-kube-api-access-xbgl2\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.265132 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-apiservice-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.366310 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbgl2\" (UniqueName: \"kubernetes.io/projected/9905d9f2-d2da-44b2-92c6-fbf3420250ae-kube-api-access-xbgl2\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.366370 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-apiservice-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.366441 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-webhook-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.372337 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-webhook-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.376183 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9905d9f2-d2da-44b2-92c6-fbf3420250ae-apiservice-cert\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.393761 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbgl2\" (UniqueName: \"kubernetes.io/projected/9905d9f2-d2da-44b2-92c6-fbf3420250ae-kube-api-access-xbgl2\") pod \"metallb-operator-webhook-server-9cf975fdf-b4d8n\" (UID: \"9905d9f2-d2da-44b2-92c6-fbf3420250ae\") " pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.542530 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz"] Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.567230 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:01 crc kubenswrapper[4953]: I1011 02:58:01.776458 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n"] Oct 11 02:58:01 crc kubenswrapper[4953]: W1011 02:58:01.782463 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9905d9f2_d2da_44b2_92c6_fbf3420250ae.slice/crio-193644e3d9c40501142b4cbc1861ac287b6f9863b879f65c3b48a7b185793f9c WatchSource:0}: Error finding container 193644e3d9c40501142b4cbc1861ac287b6f9863b879f65c3b48a7b185793f9c: Status 404 returned error can't find the container with id 193644e3d9c40501142b4cbc1861ac287b6f9863b879f65c3b48a7b185793f9c Oct 11 02:58:02 crc kubenswrapper[4953]: I1011 02:58:02.423574 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" event={"ID":"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c","Type":"ContainerStarted","Data":"f4690a72a60168670a82bbfd302617ee690f23a30ce220c2f54535a4482d0b03"} Oct 11 02:58:02 crc kubenswrapper[4953]: I1011 02:58:02.425049 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" event={"ID":"9905d9f2-d2da-44b2-92c6-fbf3420250ae","Type":"ContainerStarted","Data":"193644e3d9c40501142b4cbc1861ac287b6f9863b879f65c3b48a7b185793f9c"} Oct 11 02:58:04 crc kubenswrapper[4953]: I1011 02:58:04.440107 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" event={"ID":"456dd569-e9b3-4d3f-b5cd-524cc59e5c3c","Type":"ContainerStarted","Data":"82beae7f02487eff31fd59d3edf3d14a50f6df20be6cc1a863cd0d8a13b95808"} Oct 11 02:58:04 crc kubenswrapper[4953]: I1011 02:58:04.440588 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:04 crc kubenswrapper[4953]: I1011 02:58:04.461526 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" podStartSLOduration=1.791470788 podStartE2EDuration="4.461506069s" podCreationTimestamp="2025-10-11 02:58:00 +0000 UTC" firstStartedPulling="2025-10-11 02:58:01.55149499 +0000 UTC m=+692.484582644" lastFinishedPulling="2025-10-11 02:58:04.221530281 +0000 UTC m=+695.154617925" observedRunningTime="2025-10-11 02:58:04.458451742 +0000 UTC m=+695.391539426" watchObservedRunningTime="2025-10-11 02:58:04.461506069 +0000 UTC m=+695.394593713" Oct 11 02:58:06 crc kubenswrapper[4953]: I1011 02:58:06.451757 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" event={"ID":"9905d9f2-d2da-44b2-92c6-fbf3420250ae","Type":"ContainerStarted","Data":"a843a7f16edb2d7371a086a182ad79cbb0ba710d87c40f63afed68cc796861f1"} Oct 11 02:58:06 crc kubenswrapper[4953]: I1011 02:58:06.452156 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:21 crc kubenswrapper[4953]: I1011 02:58:21.571801 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" Oct 11 02:58:21 crc kubenswrapper[4953]: I1011 02:58:21.591972 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-9cf975fdf-b4d8n" podStartSLOduration=16.65445808 podStartE2EDuration="20.591955312s" podCreationTimestamp="2025-10-11 02:58:01 +0000 UTC" firstStartedPulling="2025-10-11 02:58:01.785771213 +0000 UTC m=+692.718858857" lastFinishedPulling="2025-10-11 02:58:05.723268425 +0000 UTC m=+696.656356089" observedRunningTime="2025-10-11 02:58:06.473536932 +0000 UTC m=+697.406624596" watchObservedRunningTime="2025-10-11 02:58:21.591955312 +0000 UTC m=+712.525042976" Oct 11 02:58:41 crc kubenswrapper[4953]: I1011 02:58:41.208675 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5b44f7b76f-hxtgz" Oct 11 02:58:41 crc kubenswrapper[4953]: I1011 02:58:41.316161 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:58:41 crc kubenswrapper[4953]: I1011 02:58:41.316507 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.041538 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-mpn4b"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.044906 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.047861 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.048237 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-klsrr" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.050861 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.052650 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.053631 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.059448 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.071128 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.153037 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-9mcll"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.154084 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.158808 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.158810 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-tj9fk" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.158912 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.159004 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.159819 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-gfm9p"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.160652 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.164857 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.192135 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-gfm9p"] Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223030 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223090 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7952\" (UniqueName: \"kubernetes.io/projected/d5b760ea-e0f2-4076-becf-3e9d0f416554-kube-api-access-w7952\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223120 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/290c040c-f640-487f-aca3-6c941d0b364b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223144 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-cert\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223170 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcphc\" (UniqueName: \"kubernetes.io/projected/290c040c-f640-487f-aca3-6c941d0b364b-kube-api-access-jcphc\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223193 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223214 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-reloader\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223236 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics-certs\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 
02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223258 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-conf\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223283 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kccq8\" (UniqueName: \"kubernetes.io/projected/9332601a-ba82-4e65-b207-f2449666e8e3-kube-api-access-kccq8\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223316 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5b760ea-e0f2-4076-becf-3e9d0f416554-metallb-excludel2\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223354 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-sockets\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223383 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223402 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-startup\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223424 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zktpv\" (UniqueName: \"kubernetes.io/projected/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-kube-api-access-zktpv\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.223446 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-metrics-certs\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324154 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324206 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-startup\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324230 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zktpv\" (UniqueName: \"kubernetes.io/projected/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-kube-api-access-zktpv\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324273 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-metrics-certs\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324313 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.324328 4953 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324335 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7952\" (UniqueName: \"kubernetes.io/projected/d5b760ea-e0f2-4076-becf-3e9d0f416554-kube-api-access-w7952\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.324386 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs podName:d5b760ea-e0f2-4076-becf-3e9d0f416554 nodeName:}" failed. No retries permitted until 2025-10-11 02:58:42.824368694 +0000 UTC m=+733.757456338 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs") pod "speaker-9mcll" (UID: "d5b760ea-e0f2-4076-becf-3e9d0f416554") : secret "speaker-certs-secret" not found Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324461 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/290c040c-f640-487f-aca3-6c941d0b364b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324505 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-cert\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324527 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcphc\" (UniqueName: \"kubernetes.io/projected/290c040c-f640-487f-aca3-6c941d0b364b-kube-api-access-jcphc\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324547 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324565 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-reloader\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324615 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics-certs\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324637 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-conf\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324659 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kccq8\" (UniqueName: \"kubernetes.io/projected/9332601a-ba82-4e65-b207-f2449666e8e3-kube-api-access-kccq8\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324682 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5b760ea-e0f2-4076-becf-3e9d0f416554-metallb-excludel2\") pod \"speaker-9mcll\" (UID: 
\"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.324735 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-sockets\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.325120 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-sockets\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.325387 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-reloader\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.325409 4953 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.325461 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-startup\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.325552 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist podName:d5b760ea-e0f2-4076-becf-3e9d0f416554 nodeName:}" failed. No retries permitted until 2025-10-11 02:58:42.825480572 +0000 UTC m=+733.758568306 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist") pod "speaker-9mcll" (UID: "d5b760ea-e0f2-4076-becf-3e9d0f416554") : secret "metallb-memberlist" not found Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.325586 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-frr-conf\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.325889 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.326406 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/d5b760ea-e0f2-4076-becf-3e9d0f416554-metallb-excludel2\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.328361 4953 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.331929 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-metrics-certs\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.333009 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-metrics-certs\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.338562 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/290c040c-f640-487f-aca3-6c941d0b364b-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.340730 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9332601a-ba82-4e65-b207-f2449666e8e3-cert\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.347244 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zktpv\" (UniqueName: \"kubernetes.io/projected/2219b4a8-f50c-4b3f-87b9-107fd5cb9256-kube-api-access-zktpv\") pod \"frr-k8s-mpn4b\" (UID: \"2219b4a8-f50c-4b3f-87b9-107fd5cb9256\") " pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.351929 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kccq8\" (UniqueName: 
\"kubernetes.io/projected/9332601a-ba82-4e65-b207-f2449666e8e3-kube-api-access-kccq8\") pod \"controller-68d546b9d8-gfm9p\" (UID: \"9332601a-ba82-4e65-b207-f2449666e8e3\") " pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.352781 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7952\" (UniqueName: \"kubernetes.io/projected/d5b760ea-e0f2-4076-becf-3e9d0f416554-kube-api-access-w7952\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.355203 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcphc\" (UniqueName: \"kubernetes.io/projected/290c040c-f640-487f-aca3-6c941d0b364b-kube-api-access-jcphc\") pod \"frr-k8s-webhook-server-64bf5d555-t96g6\" (UID: \"290c040c-f640-487f-aca3-6c941d0b364b\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.376888 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.397255 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.476794 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.624463 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6"] Oct 11 02:58:42 crc kubenswrapper[4953]: W1011 02:58:42.637442 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod290c040c_f640_487f_aca3_6c941d0b364b.slice/crio-4ad6e899cd99eb9eb06d674f5976ea76035cce20310108d3183d2f2a59dad07f WatchSource:0}: Error finding container 4ad6e899cd99eb9eb06d674f5976ea76035cce20310108d3183d2f2a59dad07f: Status 404 returned error can't find the container with id 4ad6e899cd99eb9eb06d674f5976ea76035cce20310108d3183d2f2a59dad07f Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.708543 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"8393543e7f59e49c9064729abb8214bd5c6c7e7af35ca7082b9de9081514823c"} Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.709312 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" event={"ID":"290c040c-f640-487f-aca3-6c941d0b364b","Type":"ContainerStarted","Data":"4ad6e899cd99eb9eb06d674f5976ea76035cce20310108d3183d2f2a59dad07f"} Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.834824 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.834994 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist\") pod 
\"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.835128 4953 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 11 02:58:42 crc kubenswrapper[4953]: E1011 02:58:42.835196 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist podName:d5b760ea-e0f2-4076-becf-3e9d0f416554 nodeName:}" failed. No retries permitted until 2025-10-11 02:58:43.835177618 +0000 UTC m=+734.768265262 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist") pod "speaker-9mcll" (UID: "d5b760ea-e0f2-4076-becf-3e9d0f416554") : secret "metallb-memberlist" not found Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.844480 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-metrics-certs\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:42 crc kubenswrapper[4953]: I1011 02:58:42.946829 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-gfm9p"] Oct 11 02:58:42 crc kubenswrapper[4953]: W1011 02:58:42.952397 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9332601a_ba82_4e65_b207_f2449666e8e3.slice/crio-0f5702c33b7467ea6cb80699f8b0dc7470058911eb529d3ea9320af420272710 WatchSource:0}: Error finding container 0f5702c33b7467ea6cb80699f8b0dc7470058911eb529d3ea9320af420272710: Status 404 returned error can't find the container with id 0f5702c33b7467ea6cb80699f8b0dc7470058911eb529d3ea9320af420272710 Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.720400 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gfm9p" event={"ID":"9332601a-ba82-4e65-b207-f2449666e8e3","Type":"ContainerStarted","Data":"8b7b7ab55ec5fec02e0821070dc303b2503c19039d60c798cef3a78581306e89"} Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.720818 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gfm9p" event={"ID":"9332601a-ba82-4e65-b207-f2449666e8e3","Type":"ContainerStarted","Data":"a92fcd64d3fbe6443f2230fb9fe0c6dda2815a5a818cbbd0dd903776469487f1"} Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.720955 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.720969 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-gfm9p" event={"ID":"9332601a-ba82-4e65-b207-f2449666e8e3","Type":"ContainerStarted","Data":"0f5702c33b7467ea6cb80699f8b0dc7470058911eb529d3ea9320af420272710"} Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.745491 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-gfm9p" podStartSLOduration=1.7454544410000001 podStartE2EDuration="1.745454441s" podCreationTimestamp="2025-10-11 02:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 
02:58:43.739463828 +0000 UTC m=+734.672551482" watchObservedRunningTime="2025-10-11 02:58:43.745454441 +0000 UTC m=+734.678542085" Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.848385 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.853394 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/d5b760ea-e0f2-4076-becf-3e9d0f416554-memberlist\") pod \"speaker-9mcll\" (UID: \"d5b760ea-e0f2-4076-becf-3e9d0f416554\") " pod="metallb-system/speaker-9mcll" Oct 11 02:58:43 crc kubenswrapper[4953]: I1011 02:58:43.970570 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9mcll" Oct 11 02:58:44 crc kubenswrapper[4953]: I1011 02:58:44.727149 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9mcll" event={"ID":"d5b760ea-e0f2-4076-becf-3e9d0f416554","Type":"ContainerStarted","Data":"951b9da922b7309c0250d1bcb8af447fd31535060d07a940fdf8a00d7d1d194a"} Oct 11 02:58:44 crc kubenswrapper[4953]: I1011 02:58:44.727481 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9mcll" event={"ID":"d5b760ea-e0f2-4076-becf-3e9d0f416554","Type":"ContainerStarted","Data":"bb07a611cddb4a45574e99a71981316f4cfb97faa2d8e8a335395cca80fc9dda"} Oct 11 02:58:44 crc kubenswrapper[4953]: I1011 02:58:44.727491 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9mcll" event={"ID":"d5b760ea-e0f2-4076-becf-3e9d0f416554","Type":"ContainerStarted","Data":"1ce4607d386559871eaf1f01b38171488e859b5e5d591dc41f999452dd05d2ac"} Oct 11 02:58:44 crc kubenswrapper[4953]: I1011 02:58:44.728121 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-9mcll" Oct 11 02:58:44 crc kubenswrapper[4953]: I1011 02:58:44.748098 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-9mcll" podStartSLOduration=2.748058243 podStartE2EDuration="2.748058243s" podCreationTimestamp="2025-10-11 02:58:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:58:44.745067717 +0000 UTC m=+735.678155371" watchObservedRunningTime="2025-10-11 02:58:44.748058243 +0000 UTC m=+735.681145887" Oct 11 02:58:50 crc kubenswrapper[4953]: I1011 02:58:50.766821 4953 generic.go:334] "Generic (PLEG): container finished" podID="2219b4a8-f50c-4b3f-87b9-107fd5cb9256" containerID="9f2e8ea0d615a63e707253fddb47473d255323377d1972f26d3df09a610064f4" exitCode=0 Oct 11 02:58:50 crc kubenswrapper[4953]: I1011 02:58:50.767101 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerDied","Data":"9f2e8ea0d615a63e707253fddb47473d255323377d1972f26d3df09a610064f4"} Oct 11 02:58:50 crc kubenswrapper[4953]: I1011 02:58:50.769988 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" event={"ID":"290c040c-f640-487f-aca3-6c941d0b364b","Type":"ContainerStarted","Data":"35635cc0a3555ceceae34b8053cc1bab3b46d326bf6943cfcdcb345529deacf4"} Oct 11 02:58:50 crc 
kubenswrapper[4953]: I1011 02:58:50.770160 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:58:50 crc kubenswrapper[4953]: I1011 02:58:50.834391 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" podStartSLOduration=1.7389417649999999 podStartE2EDuration="8.834368431s" podCreationTimestamp="2025-10-11 02:58:42 +0000 UTC" firstStartedPulling="2025-10-11 02:58:42.642734999 +0000 UTC m=+733.575822643" lastFinishedPulling="2025-10-11 02:58:49.738161665 +0000 UTC m=+740.671249309" observedRunningTime="2025-10-11 02:58:50.832762001 +0000 UTC m=+741.765849655" watchObservedRunningTime="2025-10-11 02:58:50.834368431 +0000 UTC m=+741.767456115" Oct 11 02:58:51 crc kubenswrapper[4953]: I1011 02:58:51.778648 4953 generic.go:334] "Generic (PLEG): container finished" podID="2219b4a8-f50c-4b3f-87b9-107fd5cb9256" containerID="7bce9dc254eed193ea9ca28e65735f6e242fbe534f166334172f1ac6f3ddfc59" exitCode=0 Oct 11 02:58:51 crc kubenswrapper[4953]: I1011 02:58:51.778728 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerDied","Data":"7bce9dc254eed193ea9ca28e65735f6e242fbe534f166334172f1ac6f3ddfc59"} Oct 11 02:58:52 crc kubenswrapper[4953]: I1011 02:58:52.787365 4953 generic.go:334] "Generic (PLEG): container finished" podID="2219b4a8-f50c-4b3f-87b9-107fd5cb9256" containerID="2ed094ac881920c087cd53454c1b14a71969dc65f721385948a9f2896e5c091b" exitCode=0 Oct 11 02:58:52 crc kubenswrapper[4953]: I1011 02:58:52.787541 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerDied","Data":"2ed094ac881920c087cd53454c1b14a71969dc65f721385948a9f2896e5c091b"} Oct 11 02:58:53 crc kubenswrapper[4953]: I1011 02:58:53.810288 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"e1e97e088d9d614d23d60b19287edd3874c6142729555e097c66fdd5a599abc2"} Oct 11 02:58:53 crc kubenswrapper[4953]: I1011 02:58:53.810695 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"6577da5c5b2e733955e45135814144398fc6e72a0c3d3bd5321fb251ec9e9829"} Oct 11 02:58:53 crc kubenswrapper[4953]: I1011 02:58:53.810711 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"be2a5680d6a591a7b7d127eab21a9251420987f11e0ab88925a8753196596104"} Oct 11 02:58:53 crc kubenswrapper[4953]: I1011 02:58:53.810722 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"eef0bd3485a8a853f665d4a2c250035208f35f5082e670eb103996e05daea331"} Oct 11 02:58:53 crc kubenswrapper[4953]: I1011 02:58:53.810733 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"6caea4cd283ca79209c27e3d06d1757fdd15985b50c6d3657ba604823fbc6dcd"} Oct 11 02:58:54 crc kubenswrapper[4953]: I1011 02:58:54.810361 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/frr-k8s-mpn4b" event={"ID":"2219b4a8-f50c-4b3f-87b9-107fd5cb9256","Type":"ContainerStarted","Data":"4e8b3e91fb465086215c418294e8b44ea87f9e16969be13dacfa944d54644aff"} Oct 11 02:58:54 crc kubenswrapper[4953]: I1011 02:58:54.810759 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:54 crc kubenswrapper[4953]: I1011 02:58:54.831695 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-mpn4b" podStartSLOduration=5.616638198 podStartE2EDuration="12.83168028s" podCreationTimestamp="2025-10-11 02:58:42 +0000 UTC" firstStartedPulling="2025-10-11 02:58:42.540181928 +0000 UTC m=+733.473269572" lastFinishedPulling="2025-10-11 02:58:49.75522401 +0000 UTC m=+740.688311654" observedRunningTime="2025-10-11 02:58:54.828768466 +0000 UTC m=+745.761856120" watchObservedRunningTime="2025-10-11 02:58:54.83168028 +0000 UTC m=+745.764767924" Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.377952 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.430798 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.655301 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.655543 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" containerID="cri-o://46d1416ef8208393306b4f747aef54823277f7b65dbbf41eb286c3250232ce46" gracePeriod=30 Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.751836 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"] Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.752040 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" containerID="cri-o://37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634" gracePeriod=30 Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.831487 4953 generic.go:334] "Generic (PLEG): container finished" podID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerID="46d1416ef8208393306b4f747aef54823277f7b65dbbf41eb286c3250232ce46" exitCode=0 Oct 11 02:58:57 crc kubenswrapper[4953]: I1011 02:58:57.831991 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" event={"ID":"def79d52-fc53-4ab1-81c4-d67959b5865f","Type":"ContainerDied","Data":"46d1416ef8208393306b4f747aef54823277f7b65dbbf41eb286c3250232ce46"} Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.083359 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.136265 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.152780 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca\") pod \"def79d52-fc53-4ab1-81c4-d67959b5865f\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.152851 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config\") pod \"def79d52-fc53-4ab1-81c4-d67959b5865f\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.152871 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xmtg\" (UniqueName: \"kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg\") pod \"def79d52-fc53-4ab1-81c4-d67959b5865f\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.152898 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles\") pod \"def79d52-fc53-4ab1-81c4-d67959b5865f\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.152925 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert\") pod \"def79d52-fc53-4ab1-81c4-d67959b5865f\" (UID: \"def79d52-fc53-4ab1-81c4-d67959b5865f\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.153822 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca" (OuterVolumeSpecName: "client-ca") pod "def79d52-fc53-4ab1-81c4-d67959b5865f" (UID: "def79d52-fc53-4ab1-81c4-d67959b5865f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.154224 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "def79d52-fc53-4ab1-81c4-d67959b5865f" (UID: "def79d52-fc53-4ab1-81c4-d67959b5865f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.154380 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config" (OuterVolumeSpecName: "config") pod "def79d52-fc53-4ab1-81c4-d67959b5865f" (UID: "def79d52-fc53-4ab1-81c4-d67959b5865f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.163412 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "def79d52-fc53-4ab1-81c4-d67959b5865f" (UID: "def79d52-fc53-4ab1-81c4-d67959b5865f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.163430 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg" (OuterVolumeSpecName: "kube-api-access-7xmtg") pod "def79d52-fc53-4ab1-81c4-d67959b5865f" (UID: "def79d52-fc53-4ab1-81c4-d67959b5865f"). InnerVolumeSpecName "kube-api-access-7xmtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.253660 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbvgf\" (UniqueName: \"kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf\") pod \"2db13bfc-b49d-49d5-b055-2befef69d136\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.253741 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert\") pod \"2db13bfc-b49d-49d5-b055-2befef69d136\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.253835 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config\") pod \"2db13bfc-b49d-49d5-b055-2befef69d136\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.253900 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca\") pod \"2db13bfc-b49d-49d5-b055-2befef69d136\" (UID: \"2db13bfc-b49d-49d5-b055-2befef69d136\") " Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.254803 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca" (OuterVolumeSpecName: "client-ca") pod "2db13bfc-b49d-49d5-b055-2befef69d136" (UID: "2db13bfc-b49d-49d5-b055-2befef69d136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255031 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config" (OuterVolumeSpecName: "config") pod "2db13bfc-b49d-49d5-b055-2befef69d136" (UID: "2db13bfc-b49d-49d5-b055-2befef69d136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255540 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255591 4953 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255640 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xmtg\" (UniqueName: \"kubernetes.io/projected/def79d52-fc53-4ab1-81c4-d67959b5865f-kube-api-access-7xmtg\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255653 4953 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2db13bfc-b49d-49d5-b055-2befef69d136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255665 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-config\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255676 4953 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/def79d52-fc53-4ab1-81c4-d67959b5865f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.255686 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/def79d52-fc53-4ab1-81c4-d67959b5865f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.258314 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf" (OuterVolumeSpecName: "kube-api-access-gbvgf") pod "2db13bfc-b49d-49d5-b055-2befef69d136" (UID: "2db13bfc-b49d-49d5-b055-2befef69d136"). InnerVolumeSpecName "kube-api-access-gbvgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.259324 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2db13bfc-b49d-49d5-b055-2befef69d136" (UID: "2db13bfc-b49d-49d5-b055-2befef69d136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.357015 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbvgf\" (UniqueName: \"kubernetes.io/projected/2db13bfc-b49d-49d5-b055-2befef69d136-kube-api-access-gbvgf\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.357051 4953 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2db13bfc-b49d-49d5-b055-2befef69d136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.840780 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.840782 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6tcb8" event={"ID":"def79d52-fc53-4ab1-81c4-d67959b5865f","Type":"ContainerDied","Data":"13dd3777d33de1d407289732c1e0f1c185257c598947e093d98dc183d1d85bd6"} Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.841961 4953 scope.go:117] "RemoveContainer" containerID="46d1416ef8208393306b4f747aef54823277f7b65dbbf41eb286c3250232ce46" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.842396 4953 generic.go:334] "Generic (PLEG): container finished" podID="2db13bfc-b49d-49d5-b055-2befef69d136" containerID="37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634" exitCode=0 Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.842413 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.842479 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" event={"ID":"2db13bfc-b49d-49d5-b055-2befef69d136","Type":"ContainerDied","Data":"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634"} Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.842531 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9" event={"ID":"2db13bfc-b49d-49d5-b055-2befef69d136","Type":"ContainerDied","Data":"7241e6aa584f82bd3faa6f540900c87eae3dda1151b59355b9722ce65f0bda83"} Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.873433 4953 scope.go:117] "RemoveContainer" containerID="37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.874704 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"] Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.877156 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jpcz9"] Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.886181 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.893555 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6tcb8"] Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.896161 4953 scope.go:117] "RemoveContainer" containerID="37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634" Oct 11 02:58:58 crc kubenswrapper[4953]: E1011 02:58:58.896528 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634\": container with ID starting with 37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634 not found: ID does not exist" containerID="37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634" Oct 11 02:58:58 crc kubenswrapper[4953]: I1011 02:58:58.896564 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634"} err="failed to get container status \"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634\": rpc error: code = NotFound desc = could not find container \"37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634\": container with ID starting with 37ca5c6dee25b30238dbab245d56b5a48f9ace5dd7c7b457b4c54ca7cd1bc634 not found: ID does not exist" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.536288 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg"] Oct 11 02:58:59 crc kubenswrapper[4953]: E1011 02:58:59.536755 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.536778 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: E1011 02:58:59.536827 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.536840 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.537051 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" containerName="route-controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.537078 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" containerName="controller-manager" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.537803 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.541447 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.541717 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.541977 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.542927 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.543250 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.543428 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-84cd89794f-2pkjg"] Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.544419 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.547826 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.548314 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.548639 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.550508 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.550776 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.550986 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.551257 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.557865 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg"] Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.561322 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.567052 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84cd89794f-2pkjg"] Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575689 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-client-ca\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575770 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvpx8\" (UniqueName: \"kubernetes.io/projected/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-kube-api-access-xvpx8\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575796 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/087309c7-add7-4339-8c26-eee291fbaf5f-serving-cert\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575825 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-proxy-ca-bundles\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575872 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z54xr\" (UniqueName: \"kubernetes.io/projected/087309c7-add7-4339-8c26-eee291fbaf5f-kube-api-access-z54xr\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575895 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-config\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575941 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-serving-cert\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.575970 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-client-ca\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.576001 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-config\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678210 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z54xr\" (UniqueName: \"kubernetes.io/projected/087309c7-add7-4339-8c26-eee291fbaf5f-kube-api-access-z54xr\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678323 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-config\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678376 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-serving-cert\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678403 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-client-ca\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678434 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-config\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678476 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-client-ca\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvpx8\" (UniqueName: \"kubernetes.io/projected/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-kube-api-access-xvpx8\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.678576 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/087309c7-add7-4339-8c26-eee291fbaf5f-serving-cert\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.679893 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-client-ca\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.679918 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-client-ca\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.679972 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-config\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " 
pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.679983 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-proxy-ca-bundles\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.680510 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-config\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.680882 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/087309c7-add7-4339-8c26-eee291fbaf5f-proxy-ca-bundles\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.682923 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-serving-cert\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.683082 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/087309c7-add7-4339-8c26-eee291fbaf5f-serving-cert\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.699192 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvpx8\" (UniqueName: \"kubernetes.io/projected/f6e4a65b-5a38-4bb8-97d2-5df34a52e386-kube-api-access-xvpx8\") pod \"route-controller-manager-68dd7b9488-fq6pg\" (UID: \"f6e4a65b-5a38-4bb8-97d2-5df34a52e386\") " pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.699784 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z54xr\" (UniqueName: \"kubernetes.io/projected/087309c7-add7-4339-8c26-eee291fbaf5f-kube-api-access-z54xr\") pod \"controller-manager-84cd89794f-2pkjg\" (UID: \"087309c7-add7-4339-8c26-eee291fbaf5f\") " pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.808074 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db13bfc-b49d-49d5-b055-2befef69d136" path="/var/lib/kubelet/pods/2db13bfc-b49d-49d5-b055-2befef69d136/volumes" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.808897 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="def79d52-fc53-4ab1-81c4-d67959b5865f" path="/var/lib/kubelet/pods/def79d52-fc53-4ab1-81c4-d67959b5865f/volumes" Oct 11 02:58:59 crc 
kubenswrapper[4953]: I1011 02:58:59.862045 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:58:59 crc kubenswrapper[4953]: I1011 02:58:59.877512 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.313200 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84cd89794f-2pkjg"] Oct 11 02:59:00 crc kubenswrapper[4953]: W1011 02:59:00.320646 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod087309c7_add7_4339_8c26_eee291fbaf5f.slice/crio-5830b6c75a4b185ee0038a896dc093faa94bec6a28a576da5de7095ca72d9a15 WatchSource:0}: Error finding container 5830b6c75a4b185ee0038a896dc093faa94bec6a28a576da5de7095ca72d9a15: Status 404 returned error can't find the container with id 5830b6c75a4b185ee0038a896dc093faa94bec6a28a576da5de7095ca72d9a15 Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.368326 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg"] Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.858202 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" event={"ID":"087309c7-add7-4339-8c26-eee291fbaf5f","Type":"ContainerStarted","Data":"ad612b76e9b165c30f14304d08de6ba83ef3c8a34b4b0eda051e16618efa62d2"} Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.858593 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.858628 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" event={"ID":"087309c7-add7-4339-8c26-eee291fbaf5f","Type":"ContainerStarted","Data":"5830b6c75a4b185ee0038a896dc093faa94bec6a28a576da5de7095ca72d9a15"} Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.860432 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" event={"ID":"f6e4a65b-5a38-4bb8-97d2-5df34a52e386","Type":"ContainerStarted","Data":"c2b87c075434d3edc21544b583d000f138e24bb5fd9c8da942807f78457f9145"} Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.860497 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" event={"ID":"f6e4a65b-5a38-4bb8-97d2-5df34a52e386","Type":"ContainerStarted","Data":"69ac89ae296f09b9e7df0c526229a7ba7f20207b95d0d9cd80bea29c668227c1"} Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.861012 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.864210 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.871521 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.905832 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-68dd7b9488-fq6pg" podStartSLOduration=3.905813107 podStartE2EDuration="3.905813107s" podCreationTimestamp="2025-10-11 02:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:59:00.905174281 +0000 UTC m=+751.838261945" watchObservedRunningTime="2025-10-11 02:59:00.905813107 +0000 UTC m=+751.838900771" Oct 11 02:59:00 crc kubenswrapper[4953]: I1011 02:59:00.910313 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-84cd89794f-2pkjg" podStartSLOduration=3.910301881 podStartE2EDuration="3.910301881s" podCreationTimestamp="2025-10-11 02:58:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 02:59:00.878072691 +0000 UTC m=+751.811160335" watchObservedRunningTime="2025-10-11 02:59:00.910301881 +0000 UTC m=+751.843389515" Oct 11 02:59:02 crc kubenswrapper[4953]: I1011 02:59:02.379519 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-mpn4b" Oct 11 02:59:02 crc kubenswrapper[4953]: I1011 02:59:02.404258 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t96g6" Oct 11 02:59:02 crc kubenswrapper[4953]: I1011 02:59:02.489202 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-gfm9p" Oct 11 02:59:03 crc kubenswrapper[4953]: I1011 02:59:03.974350 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-9mcll" Oct 11 02:59:05 crc kubenswrapper[4953]: I1011 02:59:05.541177 4953 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.055708 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.057175 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.059556 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.060230 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.066626 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.087544 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92bh6\" (UniqueName: \"kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6\") pod \"openstack-operator-index-xlp2r\" (UID: \"0811c9e7-bb25-4223-b855-4b14af1fae71\") " pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.189391 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92bh6\" (UniqueName: \"kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6\") pod \"openstack-operator-index-xlp2r\" (UID: \"0811c9e7-bb25-4223-b855-4b14af1fae71\") " pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.211351 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92bh6\" (UniqueName: \"kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6\") pod \"openstack-operator-index-xlp2r\" (UID: \"0811c9e7-bb25-4223-b855-4b14af1fae71\") " pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.379955 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.834636 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:07 crc kubenswrapper[4953]: I1011 02:59:07.902077 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-xlp2r" event={"ID":"0811c9e7-bb25-4223-b855-4b14af1fae71","Type":"ContainerStarted","Data":"a79af2397424224914b7123f3928c2b4445eca9144f6f1bdf9dd81bb78281a6d"} Oct 11 02:59:10 crc kubenswrapper[4953]: I1011 02:59:10.923499 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-xlp2r" event={"ID":"0811c9e7-bb25-4223-b855-4b14af1fae71","Type":"ContainerStarted","Data":"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f"} Oct 11 02:59:10 crc kubenswrapper[4953]: I1011 02:59:10.941481 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-xlp2r" podStartSLOduration=1.427096665 podStartE2EDuration="3.941462522s" podCreationTimestamp="2025-10-11 02:59:07 +0000 UTC" firstStartedPulling="2025-10-11 02:59:07.847470359 +0000 UTC m=+758.780558003" lastFinishedPulling="2025-10-11 02:59:10.361836216 +0000 UTC m=+761.294923860" observedRunningTime="2025-10-11 02:59:10.938373994 +0000 UTC m=+761.871461668" watchObservedRunningTime="2025-10-11 02:59:10.941462522 +0000 UTC m=+761.874550156" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.029759 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.316495 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.316557 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.636397 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wcw9m"] Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.637699 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.648379 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-p9ql6" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.651700 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wcw9m"] Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.765473 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tplxb\" (UniqueName: \"kubernetes.io/projected/b1afeedb-2499-481f-be28-8e17c9857592-kube-api-access-tplxb\") pod \"openstack-operator-index-wcw9m\" (UID: \"b1afeedb-2499-481f-be28-8e17c9857592\") " pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.867368 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tplxb\" (UniqueName: \"kubernetes.io/projected/b1afeedb-2499-481f-be28-8e17c9857592-kube-api-access-tplxb\") pod \"openstack-operator-index-wcw9m\" (UID: \"b1afeedb-2499-481f-be28-8e17c9857592\") " pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.886799 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tplxb\" (UniqueName: \"kubernetes.io/projected/b1afeedb-2499-481f-be28-8e17c9857592-kube-api-access-tplxb\") pod \"openstack-operator-index-wcw9m\" (UID: \"b1afeedb-2499-481f-be28-8e17c9857592\") " pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:11 crc kubenswrapper[4953]: I1011 02:59:11.972889 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:12 crc kubenswrapper[4953]: I1011 02:59:12.433777 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wcw9m"] Oct 11 02:59:12 crc kubenswrapper[4953]: I1011 02:59:12.937346 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wcw9m" event={"ID":"b1afeedb-2499-481f-be28-8e17c9857592","Type":"ContainerStarted","Data":"a024ac3209359f7668d1f0eef835dfa1f45e787c1e8709df70d0045ccaa878da"} Oct 11 02:59:12 crc kubenswrapper[4953]: I1011 02:59:12.937891 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wcw9m" event={"ID":"b1afeedb-2499-481f-be28-8e17c9857592","Type":"ContainerStarted","Data":"965c8a33ed412288eb306550aaf29565c7104aba60143e30c09ac0648efe00d1"} Oct 11 02:59:12 crc kubenswrapper[4953]: I1011 02:59:12.937525 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-xlp2r" podUID="0811c9e7-bb25-4223-b855-4b14af1fae71" containerName="registry-server" containerID="cri-o://7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f" gracePeriod=2 Oct 11 02:59:12 crc kubenswrapper[4953]: I1011 02:59:12.958251 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wcw9m" podStartSLOduration=1.9071200209999999 podStartE2EDuration="1.958235072s" podCreationTimestamp="2025-10-11 02:59:11 +0000 UTC" firstStartedPulling="2025-10-11 02:59:12.448725223 +0000 UTC m=+763.381812867" lastFinishedPulling="2025-10-11 02:59:12.499840274 +0000 UTC m=+763.432927918" observedRunningTime="2025-10-11 02:59:12.954417105 +0000 UTC m=+763.887504749" watchObservedRunningTime="2025-10-11 02:59:12.958235072 +0000 UTC m=+763.891322706" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.336285 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.488338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92bh6\" (UniqueName: \"kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6\") pod \"0811c9e7-bb25-4223-b855-4b14af1fae71\" (UID: \"0811c9e7-bb25-4223-b855-4b14af1fae71\") " Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.494821 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6" (OuterVolumeSpecName: "kube-api-access-92bh6") pod "0811c9e7-bb25-4223-b855-4b14af1fae71" (UID: "0811c9e7-bb25-4223-b855-4b14af1fae71"). InnerVolumeSpecName "kube-api-access-92bh6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.589760 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92bh6\" (UniqueName: \"kubernetes.io/projected/0811c9e7-bb25-4223-b855-4b14af1fae71-kube-api-access-92bh6\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.943754 4953 generic.go:334] "Generic (PLEG): container finished" podID="0811c9e7-bb25-4223-b855-4b14af1fae71" containerID="7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f" exitCode=0 Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.943800 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-xlp2r" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.943841 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-xlp2r" event={"ID":"0811c9e7-bb25-4223-b855-4b14af1fae71","Type":"ContainerDied","Data":"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f"} Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.943876 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-xlp2r" event={"ID":"0811c9e7-bb25-4223-b855-4b14af1fae71","Type":"ContainerDied","Data":"a79af2397424224914b7123f3928c2b4445eca9144f6f1bdf9dd81bb78281a6d"} Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.943897 4953 scope.go:117] "RemoveContainer" containerID="7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.959141 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.963038 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-xlp2r"] Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.964128 4953 scope.go:117] "RemoveContainer" containerID="7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f" Oct 11 02:59:13 crc kubenswrapper[4953]: E1011 02:59:13.964583 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f\": container with ID starting with 7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f not found: ID does not exist" containerID="7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f" Oct 11 02:59:13 crc kubenswrapper[4953]: I1011 02:59:13.964661 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f"} err="failed to get container status \"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f\": rpc error: code = NotFound desc = could not find container \"7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f\": container with ID starting with 7d9748aba0665ddee08dde466af3741f3802bf5ab477a912a1f85994675d469f not found: ID does not exist" Oct 11 02:59:15 crc kubenswrapper[4953]: I1011 02:59:15.811481 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0811c9e7-bb25-4223-b855-4b14af1fae71" path="/var/lib/kubelet/pods/0811c9e7-bb25-4223-b855-4b14af1fae71/volumes" Oct 11 02:59:21 crc kubenswrapper[4953]: I1011 02:59:21.973035 4953 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:21 crc kubenswrapper[4953]: I1011 02:59:21.973385 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:22 crc kubenswrapper[4953]: I1011 02:59:22.011056 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:22 crc kubenswrapper[4953]: I1011 02:59:22.036297 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-wcw9m" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.480637 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm"] Oct 11 02:59:23 crc kubenswrapper[4953]: E1011 02:59:23.481361 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0811c9e7-bb25-4223-b855-4b14af1fae71" containerName="registry-server" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.481380 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="0811c9e7-bb25-4223-b855-4b14af1fae71" containerName="registry-server" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.481516 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="0811c9e7-bb25-4223-b855-4b14af1fae71" containerName="registry-server" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.482595 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.485481 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-lxq7j" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.493243 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm"] Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.558346 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.558407 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.558450 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr8vg\" (UniqueName: \"kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" 
Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.659734 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.660077 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.660196 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr8vg\" (UniqueName: \"kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.660305 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.660382 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.683024 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr8vg\" (UniqueName: \"kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg\") pod \"e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:23 crc kubenswrapper[4953]: I1011 02:59:23.809288 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.229644 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm"] Oct 11 02:59:24 crc kubenswrapper[4953]: W1011 02:59:24.236066 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fb71986_972b_4cd1_a70c_03573869886e.slice/crio-ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0 WatchSource:0}: Error finding container ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0: Status 404 returned error can't find the container with id ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0 Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.853934 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.857191 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.866814 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.980912 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.980997 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj4rd\" (UniqueName: \"kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:24 crc kubenswrapper[4953]: I1011 02:59:24.981142 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.036421 4953 generic.go:334] "Generic (PLEG): container finished" podID="7fb71986-972b-4cd1-a70c-03573869886e" containerID="74d475b60c423dd02b75a4ccf97019d188fa6397457b5d4253ec7b590bae6a9e" exitCode=0 Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.036469 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" event={"ID":"7fb71986-972b-4cd1-a70c-03573869886e","Type":"ContainerDied","Data":"74d475b60c423dd02b75a4ccf97019d188fa6397457b5d4253ec7b590bae6a9e"} Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.036496 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" 
event={"ID":"7fb71986-972b-4cd1-a70c-03573869886e","Type":"ContainerStarted","Data":"ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0"} Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.081928 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.082005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.082036 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj4rd\" (UniqueName: \"kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.082535 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.082731 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.102804 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj4rd\" (UniqueName: \"kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd\") pod \"community-operators-67xhl\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.178585 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:25 crc kubenswrapper[4953]: I1011 02:59:25.738225 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:25 crc kubenswrapper[4953]: W1011 02:59:25.747838 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02daa440_ea7b_4645_9944_2a3e52d6403c.slice/crio-93800b66fb4f1f88ff5241c7523642f527abcd8026133a59821ae56ec03ca321 WatchSource:0}: Error finding container 93800b66fb4f1f88ff5241c7523642f527abcd8026133a59821ae56ec03ca321: Status 404 returned error can't find the container with id 93800b66fb4f1f88ff5241c7523642f527abcd8026133a59821ae56ec03ca321 Oct 11 02:59:26 crc kubenswrapper[4953]: I1011 02:59:26.054173 4953 generic.go:334] "Generic (PLEG): container finished" podID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerID="ef441a1ad0581a919e1a077b11f2bf15c5fda96c66c957fa79d2e40f3cf8ab86" exitCode=0 Oct 11 02:59:26 crc kubenswrapper[4953]: I1011 02:59:26.054230 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerDied","Data":"ef441a1ad0581a919e1a077b11f2bf15c5fda96c66c957fa79d2e40f3cf8ab86"} Oct 11 02:59:26 crc kubenswrapper[4953]: I1011 02:59:26.055404 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerStarted","Data":"93800b66fb4f1f88ff5241c7523642f527abcd8026133a59821ae56ec03ca321"} Oct 11 02:59:26 crc kubenswrapper[4953]: I1011 02:59:26.062080 4953 generic.go:334] "Generic (PLEG): container finished" podID="7fb71986-972b-4cd1-a70c-03573869886e" containerID="fdfffcfabc425eacad6f095c4ac38b554286cdaab05b5128b09699273c78b43a" exitCode=0 Oct 11 02:59:26 crc kubenswrapper[4953]: I1011 02:59:26.062172 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" event={"ID":"7fb71986-972b-4cd1-a70c-03573869886e","Type":"ContainerDied","Data":"fdfffcfabc425eacad6f095c4ac38b554286cdaab05b5128b09699273c78b43a"} Oct 11 02:59:27 crc kubenswrapper[4953]: I1011 02:59:27.072404 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerStarted","Data":"9616bd0b0ede877b036484dbb5f087a6e439ed8ca9b682b356960bae629164f3"} Oct 11 02:59:27 crc kubenswrapper[4953]: I1011 02:59:27.076580 4953 generic.go:334] "Generic (PLEG): container finished" podID="7fb71986-972b-4cd1-a70c-03573869886e" containerID="98eb1c43763c5bd4dc07ba9ae991103fcbcf4684d3480ce0e14896cf75ff2e46" exitCode=0 Oct 11 02:59:27 crc kubenswrapper[4953]: I1011 02:59:27.076641 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" event={"ID":"7fb71986-972b-4cd1-a70c-03573869886e","Type":"ContainerDied","Data":"98eb1c43763c5bd4dc07ba9ae991103fcbcf4684d3480ce0e14896cf75ff2e46"} Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.085979 4953 generic.go:334] "Generic (PLEG): container finished" podID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerID="9616bd0b0ede877b036484dbb5f087a6e439ed8ca9b682b356960bae629164f3" exitCode=0 Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 
02:59:28.086067 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerDied","Data":"9616bd0b0ede877b036484dbb5f087a6e439ed8ca9b682b356960bae629164f3"} Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.530812 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.639749 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr8vg\" (UniqueName: \"kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg\") pod \"7fb71986-972b-4cd1-a70c-03573869886e\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.639799 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util\") pod \"7fb71986-972b-4cd1-a70c-03573869886e\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.639847 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle\") pod \"7fb71986-972b-4cd1-a70c-03573869886e\" (UID: \"7fb71986-972b-4cd1-a70c-03573869886e\") " Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.640719 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle" (OuterVolumeSpecName: "bundle") pod "7fb71986-972b-4cd1-a70c-03573869886e" (UID: "7fb71986-972b-4cd1-a70c-03573869886e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.648061 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg" (OuterVolumeSpecName: "kube-api-access-wr8vg") pod "7fb71986-972b-4cd1-a70c-03573869886e" (UID: "7fb71986-972b-4cd1-a70c-03573869886e"). InnerVolumeSpecName "kube-api-access-wr8vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.674296 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util" (OuterVolumeSpecName: "util") pod "7fb71986-972b-4cd1-a70c-03573869886e" (UID: "7fb71986-972b-4cd1-a70c-03573869886e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.741252 4953 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.741293 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr8vg\" (UniqueName: \"kubernetes.io/projected/7fb71986-972b-4cd1-a70c-03573869886e-kube-api-access-wr8vg\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:28 crc kubenswrapper[4953]: I1011 02:59:28.741308 4953 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7fb71986-972b-4cd1-a70c-03573869886e-util\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:29 crc kubenswrapper[4953]: I1011 02:59:29.099154 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerStarted","Data":"99ed45ec41d055a5a0b783d7b90346e89a36c6214ca052cf2685e141ed552c86"} Oct 11 02:59:29 crc kubenswrapper[4953]: I1011 02:59:29.104205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" event={"ID":"7fb71986-972b-4cd1-a70c-03573869886e","Type":"ContainerDied","Data":"ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0"} Oct 11 02:59:29 crc kubenswrapper[4953]: I1011 02:59:29.104250 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddfc963f159258a837397d35fc1a4bb77f9d62b466665b277b653f75d6a2bdd0" Oct 11 02:59:29 crc kubenswrapper[4953]: I1011 02:59:29.104326 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm" Oct 11 02:59:29 crc kubenswrapper[4953]: I1011 02:59:29.577342 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-67xhl" podStartSLOduration=2.895820009 podStartE2EDuration="5.577314631s" podCreationTimestamp="2025-10-11 02:59:24 +0000 UTC" firstStartedPulling="2025-10-11 02:59:26.057170869 +0000 UTC m=+776.990258533" lastFinishedPulling="2025-10-11 02:59:28.738665471 +0000 UTC m=+779.671753155" observedRunningTime="2025-10-11 02:59:29.13733854 +0000 UTC m=+780.070426204" watchObservedRunningTime="2025-10-11 02:59:29.577314631 +0000 UTC m=+780.510402295" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.179381 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.179724 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.254878 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.975539 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg"] Oct 11 02:59:35 crc kubenswrapper[4953]: E1011 02:59:35.975897 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="pull" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.975916 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="pull" Oct 11 02:59:35 crc kubenswrapper[4953]: E1011 02:59:35.975943 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="extract" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.975952 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="extract" Oct 11 02:59:35 crc kubenswrapper[4953]: E1011 02:59:35.975969 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="util" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.975978 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="util" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.976160 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fb71986-972b-4cd1-a70c-03573869886e" containerName="extract" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.977070 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.979515 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-sj6zk" Oct 11 02:59:35 crc kubenswrapper[4953]: I1011 02:59:35.995761 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg"] Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.149850 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqk74\" (UniqueName: \"kubernetes.io/projected/40ef9bae-babe-4b3b-920b-7e0597df1221-kube-api-access-jqk74\") pod \"openstack-operator-controller-operator-546b8c9657-c9vtg\" (UID: \"40ef9bae-babe-4b3b-920b-7e0597df1221\") " pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.189250 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.251263 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqk74\" (UniqueName: \"kubernetes.io/projected/40ef9bae-babe-4b3b-920b-7e0597df1221-kube-api-access-jqk74\") pod \"openstack-operator-controller-operator-546b8c9657-c9vtg\" (UID: \"40ef9bae-babe-4b3b-920b-7e0597df1221\") " pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.268331 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqk74\" (UniqueName: \"kubernetes.io/projected/40ef9bae-babe-4b3b-920b-7e0597df1221-kube-api-access-jqk74\") pod \"openstack-operator-controller-operator-546b8c9657-c9vtg\" (UID: \"40ef9bae-babe-4b3b-920b-7e0597df1221\") " pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.297175 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" Oct 11 02:59:36 crc kubenswrapper[4953]: I1011 02:59:36.740312 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg"] Oct 11 02:59:37 crc kubenswrapper[4953]: I1011 02:59:37.159162 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" event={"ID":"40ef9bae-babe-4b3b-920b-7e0597df1221","Type":"ContainerStarted","Data":"a01730d42f171e52dba174fb63478b433259544e62b5a20d5569e90e8304449d"} Oct 11 02:59:37 crc kubenswrapper[4953]: I1011 02:59:37.548962 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:38 crc kubenswrapper[4953]: I1011 02:59:38.166240 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-67xhl" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="registry-server" containerID="cri-o://99ed45ec41d055a5a0b783d7b90346e89a36c6214ca052cf2685e141ed552c86" gracePeriod=2 Oct 11 02:59:39 crc kubenswrapper[4953]: I1011 02:59:39.174578 4953 generic.go:334] "Generic (PLEG): container finished" podID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerID="99ed45ec41d055a5a0b783d7b90346e89a36c6214ca052cf2685e141ed552c86" exitCode=0 Oct 11 02:59:39 crc kubenswrapper[4953]: I1011 02:59:39.174649 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerDied","Data":"99ed45ec41d055a5a0b783d7b90346e89a36c6214ca052cf2685e141ed552c86"} Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.217688 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.218011 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-67xhl" event={"ID":"02daa440-ea7b-4645-9944-2a3e52d6403c","Type":"ContainerDied","Data":"93800b66fb4f1f88ff5241c7523642f527abcd8026133a59821ae56ec03ca321"} Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.218045 4953 scope.go:117] "RemoveContainer" containerID="99ed45ec41d055a5a0b783d7b90346e89a36c6214ca052cf2685e141ed552c86" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.317776 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj4rd\" (UniqueName: \"kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd\") pod \"02daa440-ea7b-4645-9944-2a3e52d6403c\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.317878 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities\") pod \"02daa440-ea7b-4645-9944-2a3e52d6403c\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.317920 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content\") pod \"02daa440-ea7b-4645-9944-2a3e52d6403c\" (UID: \"02daa440-ea7b-4645-9944-2a3e52d6403c\") " Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.319760 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities" (OuterVolumeSpecName: "utilities") pod "02daa440-ea7b-4645-9944-2a3e52d6403c" (UID: "02daa440-ea7b-4645-9944-2a3e52d6403c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.344840 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd" (OuterVolumeSpecName: "kube-api-access-fj4rd") pod "02daa440-ea7b-4645-9944-2a3e52d6403c" (UID: "02daa440-ea7b-4645-9944-2a3e52d6403c"). InnerVolumeSpecName "kube-api-access-fj4rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.388063 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02daa440-ea7b-4645-9944-2a3e52d6403c" (UID: "02daa440-ea7b-4645-9944-2a3e52d6403c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.418550 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj4rd\" (UniqueName: \"kubernetes.io/projected/02daa440-ea7b-4645-9944-2a3e52d6403c-kube-api-access-fj4rd\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.418585 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.418596 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02daa440-ea7b-4645-9944-2a3e52d6403c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.450580 4953 scope.go:117] "RemoveContainer" containerID="9616bd0b0ede877b036484dbb5f087a6e439ed8ca9b682b356960bae629164f3" Oct 11 02:59:40 crc kubenswrapper[4953]: I1011 02:59:40.466135 4953 scope.go:117] "RemoveContainer" containerID="ef441a1ad0581a919e1a077b11f2bf15c5fda96c66c957fa79d2e40f3cf8ab86" Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.226251 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" event={"ID":"40ef9bae-babe-4b3b-920b-7e0597df1221","Type":"ContainerStarted","Data":"84924cb1d06d7e93c53600260119c73765907e2c01482f1e76e8f9ae0f712a33"} Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.228903 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-67xhl" Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.279019 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.283981 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-67xhl"] Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.316804 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.316871 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.316923 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.318291 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 02:59:41 crc kubenswrapper[4953]: 
I1011 02:59:41.318394 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45" gracePeriod=600
Oct 11 02:59:41 crc kubenswrapper[4953]: I1011 02:59:41.808041 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" path="/var/lib/kubelet/pods/02daa440-ea7b-4645-9944-2a3e52d6403c/volumes"
Oct 11 02:59:42 crc kubenswrapper[4953]: I1011 02:59:42.244010 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45" exitCode=0
Oct 11 02:59:42 crc kubenswrapper[4953]: I1011 02:59:42.244118 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45"}
Oct 11 02:59:42 crc kubenswrapper[4953]: I1011 02:59:42.244162 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294"}
Oct 11 02:59:42 crc kubenswrapper[4953]: I1011 02:59:42.244190 4953 scope.go:117] "RemoveContainer" containerID="0511f13731843b9979d221390934cd551a891a79ef8451ae3f30ad1f7c42593a"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.253961 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" event={"ID":"40ef9bae-babe-4b3b-920b-7e0597df1221","Type":"ContainerStarted","Data":"5c31b04380b1a7c7f3774d794a032f7b5dccc3203a7db8b5a5b83d3146852260"}
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.254664 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.299518 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg" podStartSLOduration=1.970609724 podStartE2EDuration="8.299498388s" podCreationTimestamp="2025-10-11 02:59:35 +0000 UTC" firstStartedPulling="2025-10-11 02:59:36.74948025 +0000 UTC m=+787.682567904" lastFinishedPulling="2025-10-11 02:59:43.078368924 +0000 UTC m=+794.011456568" observedRunningTime="2025-10-11 02:59:43.293897415 +0000 UTC m=+794.226985059" watchObservedRunningTime="2025-10-11 02:59:43.299498388 +0000 UTC m=+794.232586042"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.954110 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:43 crc kubenswrapper[4953]: E1011 02:59:43.954711 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="extract-content"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.954725 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="extract-content"
Oct 11 02:59:43 crc kubenswrapper[4953]: E1011 02:59:43.954741 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="extract-utilities"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.954749 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="extract-utilities"
Oct 11 02:59:43 crc kubenswrapper[4953]: E1011 02:59:43.954772 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="registry-server"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.954780 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="registry-server"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.954905 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="02daa440-ea7b-4645-9944-2a3e52d6403c" containerName="registry-server"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.955887 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.976525 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.977320 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5szdr\" (UniqueName: \"kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.977411 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:43 crc kubenswrapper[4953]: I1011 02:59:43.977538 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.078773 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5szdr\" (UniqueName: \"kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.078827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.078880 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.079354 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.079433 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.099255 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5szdr\" (UniqueName: \"kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr\") pod \"redhat-marketplace-kntkk\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") " pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.278097 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:44 crc kubenswrapper[4953]: I1011 02:59:44.771194 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:45 crc kubenswrapper[4953]: I1011 02:59:45.270670 4953 generic.go:334] "Generic (PLEG): container finished" podID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerID="ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1" exitCode=0
Oct 11 02:59:45 crc kubenswrapper[4953]: I1011 02:59:45.270719 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerDied","Data":"ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1"}
Oct 11 02:59:45 crc kubenswrapper[4953]: I1011 02:59:45.270953 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerStarted","Data":"adb506af39fe6d38c134d4288e1b863f0e666236e0f554c204291aca2fc9cf6a"}
Oct 11 02:59:46 crc kubenswrapper[4953]: I1011 02:59:46.278932 4953 generic.go:334] "Generic (PLEG): container finished" podID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerID="1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec" exitCode=0
Oct 11 02:59:46 crc kubenswrapper[4953]: I1011 02:59:46.278993 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerDied","Data":"1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec"}
Oct 11 02:59:46 crc kubenswrapper[4953]: I1011 02:59:46.301130 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-546b8c9657-c9vtg"
Oct 11 02:59:47 crc kubenswrapper[4953]: I1011 02:59:47.286262 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerStarted","Data":"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"}
Oct 11 02:59:47 crc kubenswrapper[4953]: I1011 02:59:47.302727 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kntkk" podStartSLOduration=2.807850208 podStartE2EDuration="4.302706852s" podCreationTimestamp="2025-10-11 02:59:43 +0000 UTC" firstStartedPulling="2025-10-11 02:59:45.272029322 +0000 UTC m=+796.205116966" lastFinishedPulling="2025-10-11 02:59:46.766885956 +0000 UTC m=+797.699973610" observedRunningTime="2025-10-11 02:59:47.302155498 +0000 UTC m=+798.235243142" watchObservedRunningTime="2025-10-11 02:59:47.302706852 +0000 UTC m=+798.235794496"
Oct 11 02:59:54 crc kubenswrapper[4953]: I1011 02:59:54.279151 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:54 crc kubenswrapper[4953]: I1011 02:59:54.280720 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:54 crc kubenswrapper[4953]: I1011 02:59:54.336505 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:54 crc kubenswrapper[4953]: I1011 02:59:54.384880 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:54 crc kubenswrapper[4953]: I1011 02:59:54.569914 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.348960 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kntkk" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="registry-server" containerID="cri-o://fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9" gracePeriod=2
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.824894 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.959726 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities\") pod \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") "
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.959839 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content\") pod \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") "
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.959912 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5szdr\" (UniqueName: \"kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr\") pod \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\" (UID: \"163a25df-6ebc-45c7-94dc-5aef7fab57a3\") "
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.962001 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities" (OuterVolumeSpecName: "utilities") pod "163a25df-6ebc-45c7-94dc-5aef7fab57a3" (UID: "163a25df-6ebc-45c7-94dc-5aef7fab57a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.973084 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "163a25df-6ebc-45c7-94dc-5aef7fab57a3" (UID: "163a25df-6ebc-45c7-94dc-5aef7fab57a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 02:59:56 crc kubenswrapper[4953]: I1011 02:59:56.974867 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr" (OuterVolumeSpecName: "kube-api-access-5szdr") pod "163a25df-6ebc-45c7-94dc-5aef7fab57a3" (UID: "163a25df-6ebc-45c7-94dc-5aef7fab57a3"). InnerVolumeSpecName "kube-api-access-5szdr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.061905 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.061937 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/163a25df-6ebc-45c7-94dc-5aef7fab57a3-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.061950 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5szdr\" (UniqueName: \"kubernetes.io/projected/163a25df-6ebc-45c7-94dc-5aef7fab57a3-kube-api-access-5szdr\") on node \"crc\" DevicePath \"\""
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.356985 4953 generic.go:334] "Generic (PLEG): container finished" podID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerID="fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9" exitCode=0
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.357033 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerDied","Data":"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"}
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.357042 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kntkk"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.357085 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kntkk" event={"ID":"163a25df-6ebc-45c7-94dc-5aef7fab57a3","Type":"ContainerDied","Data":"adb506af39fe6d38c134d4288e1b863f0e666236e0f554c204291aca2fc9cf6a"}
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.357108 4953 scope.go:117] "RemoveContainer" containerID="fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.373766 4953 scope.go:117] "RemoveContainer" containerID="1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.384683 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.396087 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kntkk"]
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.404359 4953 scope.go:117] "RemoveContainer" containerID="ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.417702 4953 scope.go:117] "RemoveContainer" containerID="fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"
Oct 11 02:59:57 crc kubenswrapper[4953]: E1011 02:59:57.418071 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9\": container with ID starting with fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9 not found: ID does not exist" containerID="fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.418097 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9"} err="failed to get container status \"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9\": rpc error: code = NotFound desc = could not find container \"fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9\": container with ID starting with fe8bb4fbabed96cc174149475e26fafc359111d6182462ebc3b7992b4a6bc4f9 not found: ID does not exist"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.418119 4953 scope.go:117] "RemoveContainer" containerID="1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec"
Oct 11 02:59:57 crc kubenswrapper[4953]: E1011 02:59:57.418298 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec\": container with ID starting with 1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec not found: ID does not exist" containerID="1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.418315 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec"} err="failed to get container status \"1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec\": rpc error: code = NotFound desc = could not find container \"1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec\": container with ID starting with 1c97ea3fde4b6d302a24292cb49de883174fd44269ace29a650f53a9ff4ac2ec not found: ID does not exist"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.418327 4953 scope.go:117] "RemoveContainer" containerID="ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1"
Oct 11 02:59:57 crc kubenswrapper[4953]: E1011 02:59:57.418493 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1\": container with ID starting with ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1 not found: ID does not exist" containerID="ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.418508 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1"} err="failed to get container status \"ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1\": rpc error: code = NotFound desc = could not find container \"ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1\": container with ID starting with ba9a1bde0e4f1335df864b32d7d9fd2f864b674d6679b9d2d460634428246cd1 not found: ID does not exist"
Oct 11 02:59:57 crc kubenswrapper[4953]: I1011 02:59:57.802925 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" path="/var/lib/kubelet/pods/163a25df-6ebc-45c7-94dc-5aef7fab57a3/volumes"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.131172 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"]
Oct 11 03:00:00 crc kubenswrapper[4953]: E1011 03:00:00.132223 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="registry-server"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.132237 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="registry-server"
Oct 11 03:00:00 crc kubenswrapper[4953]: E1011 03:00:00.132260 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="extract-content"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.132266 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="extract-content"
Oct 11 03:00:00 crc kubenswrapper[4953]: E1011 03:00:00.132277 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="extract-utilities"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.132283 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="extract-utilities"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.132386 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="163a25df-6ebc-45c7-94dc-5aef7fab57a3" containerName="registry-server"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.132802 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.136054 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.136985 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.146944 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"]
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.211406 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.211531 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxvmg\" (UniqueName: \"kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.211626 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.312387 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.312431 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxvmg\" (UniqueName: \"kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.312456 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.313501 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.318857 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.338527 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxvmg\" (UniqueName: \"kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg\") pod \"collect-profiles-29335860-zxgtp\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.450034 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:00 crc kubenswrapper[4953]: I1011 03:00:00.875237 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"]
Oct 11 03:00:01 crc kubenswrapper[4953]: I1011 03:00:01.380722 4953 generic.go:334] "Generic (PLEG): container finished" podID="b70ea1d6-5c99-4c30-8df7-2baff910aeab" containerID="e6c796e07f73cbe9408bdd465ec0a1bf7486fd84b3111de90d3494bbba99cea3" exitCode=0
Oct 11 03:00:01 crc kubenswrapper[4953]: I1011 03:00:01.381085 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp" event={"ID":"b70ea1d6-5c99-4c30-8df7-2baff910aeab","Type":"ContainerDied","Data":"e6c796e07f73cbe9408bdd465ec0a1bf7486fd84b3111de90d3494bbba99cea3"}
Oct 11 03:00:01 crc kubenswrapper[4953]: I1011 03:00:01.381236 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp" event={"ID":"b70ea1d6-5c99-4c30-8df7-2baff910aeab","Type":"ContainerStarted","Data":"103616d70c12e0c9241a579f971908eec834770a532176dadbb24908ba2b28ea"}
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.617754 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.619388 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.621900 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.623033 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.624047 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-fc8q4"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.625230 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.626213 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-grbq7"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.634591 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.635539 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.638161 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-wl6m6"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.638369 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.641560 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.642681 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.644956 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-2wmw6"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.655123 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.683108 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.697880 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.698915 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.699495 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.699764 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.707118 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-6z6pf"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.712700 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-4xvcz"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.719158 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.723434 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.734269 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.735439 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.738143 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.739718 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.747448 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgrm7\" (UniqueName: \"kubernetes.io/projected/86c53eae-1329-4321-86d2-80b140234b48-kube-api-access-jgrm7\") pod \"designate-operator-controller-manager-687df44cdb-tmbxg\" (UID: \"86c53eae-1329-4321-86d2-80b140234b48\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.747505 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7sqd\" (UniqueName: \"kubernetes.io/projected/6fdfc46d-0e5a-4b00-bc31-67beabb8c089-kube-api-access-w7sqd\") pod \"glance-operator-controller-manager-7bb46cd7d-sg8ck\" (UID: \"6fdfc46d-0e5a-4b00-bc31-67beabb8c089\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.747549 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2h7b\" (UniqueName: \"kubernetes.io/projected/c7b14405-4593-4f33-99a7-d40ce066518e-kube-api-access-f2h7b\") pod \"barbican-operator-controller-manager-64f84fcdbb-mndql\" (UID: \"c7b14405-4593-4f33-99a7-d40ce066518e\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.747580 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m54fk\" (UniqueName: \"kubernetes.io/projected/0f923b33-4a14-4290-90f7-7e6cee41df34-kube-api-access-m54fk\") pod \"cinder-operator-controller-manager-6c565d56b4-xjd8m\" (UID: \"0f923b33-4a14-4290-90f7-7e6cee41df34\") " pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.747762 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.748693 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-dddjk"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.749717 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-6mhqb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.763480 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.783055 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.792249 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.793819 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.798687 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.800098 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.800264 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-ft8qv"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.803971 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-lhh2b"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.817337 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.830972 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.840123 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.841453 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.848066 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-mqgr4"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849592 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwk9x\" (UniqueName: \"kubernetes.io/projected/a319207e-2833-4dd6-b9db-60ce94fd41af-kube-api-access-mwk9x\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849647 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2h7b\" (UniqueName: \"kubernetes.io/projected/c7b14405-4593-4f33-99a7-d40ce066518e-kube-api-access-f2h7b\") pod \"barbican-operator-controller-manager-64f84fcdbb-mndql\" (UID: \"c7b14405-4593-4f33-99a7-d40ce066518e\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849682 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvztk\" (UniqueName: \"kubernetes.io/projected/91516d61-8792-4b37-aa49-d72705bae472-kube-api-access-dvztk\") pod \"horizon-operator-controller-manager-6d74794d9b-5cfjb\" (UID: \"91516d61-8792-4b37-aa49-d72705bae472\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849703 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849727 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m54fk\" (UniqueName: \"kubernetes.io/projected/0f923b33-4a14-4290-90f7-7e6cee41df34-kube-api-access-m54fk\") pod \"cinder-operator-controller-manager-6c565d56b4-xjd8m\" (UID: \"0f923b33-4a14-4290-90f7-7e6cee41df34\") " pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849757 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/afa4c58f-9d37-43b6-a7f7-a9d75b68c39c-kube-api-access-5c9jp\") pod \"heat-operator-controller-manager-6d9967f8dd-zwhzq\" (UID: \"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849777 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psdh6\" (UniqueName: \"kubernetes.io/projected/f9afccad-9780-49c8-a7f1-eea5cdf50239-kube-api-access-psdh6\") pod \"ironic-operator-controller-manager-74cb5cbc49-b6fwb\" (UID: \"f9afccad-9780-49c8-a7f1-eea5cdf50239\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849815 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgrm7\" (UniqueName: \"kubernetes.io/projected/86c53eae-1329-4321-86d2-80b140234b48-kube-api-access-jgrm7\") pod \"designate-operator-controller-manager-687df44cdb-tmbxg\" (UID: \"86c53eae-1329-4321-86d2-80b140234b48\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.849843 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7sqd\" (UniqueName: \"kubernetes.io/projected/6fdfc46d-0e5a-4b00-bc31-67beabb8c089-kube-api-access-w7sqd\") pod \"glance-operator-controller-manager-7bb46cd7d-sg8ck\" (UID: \"6fdfc46d-0e5a-4b00-bc31-67beabb8c089\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.860485 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.861804 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.866726 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.867877 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.869699 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-gvzph"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.869952 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-bmnhk"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.886733 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2h7b\" (UniqueName: \"kubernetes.io/projected/c7b14405-4593-4f33-99a7-d40ce066518e-kube-api-access-f2h7b\") pod \"barbican-operator-controller-manager-64f84fcdbb-mndql\" (UID: \"c7b14405-4593-4f33-99a7-d40ce066518e\") " pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.886811 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.894118 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m54fk\" (UniqueName: \"kubernetes.io/projected/0f923b33-4a14-4290-90f7-7e6cee41df34-kube-api-access-m54fk\") pod \"cinder-operator-controller-manager-6c565d56b4-xjd8m\" (UID: \"0f923b33-4a14-4290-90f7-7e6cee41df34\") " pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.904149 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7sqd\" (UniqueName: \"kubernetes.io/projected/6fdfc46d-0e5a-4b00-bc31-67beabb8c089-kube-api-access-w7sqd\") pod \"glance-operator-controller-manager-7bb46cd7d-sg8ck\" (UID: \"6fdfc46d-0e5a-4b00-bc31-67beabb8c089\") " pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.908384 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.909447 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.911999 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-f68jh"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.914277 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgrm7\" (UniqueName: \"kubernetes.io/projected/86c53eae-1329-4321-86d2-80b140234b48-kube-api-access-jgrm7\") pod \"designate-operator-controller-manager-687df44cdb-tmbxg\" (UID: \"86c53eae-1329-4321-86d2-80b140234b48\") " pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.936231 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.940437 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"]
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.952936 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.969872 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxls6\" (UniqueName: \"kubernetes.io/projected/a95dee0f-7e54-41f1-99d3-3df7a8554793-kube-api-access-bxls6\") pod \"keystone-operator-controller-manager-ddb98f99b-4f4lp\" (UID: \"a95dee0f-7e54-41f1-99d3-3df7a8554793\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.969959 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/afa4c58f-9d37-43b6-a7f7-a9d75b68c39c-kube-api-access-5c9jp\") pod \"heat-operator-controller-manager-6d9967f8dd-zwhzq\" (UID: \"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.969990 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psdh6\" (UniqueName: \"kubernetes.io/projected/f9afccad-9780-49c8-a7f1-eea5cdf50239-kube-api-access-psdh6\") pod \"ironic-operator-controller-manager-74cb5cbc49-b6fwb\" (UID: \"f9afccad-9780-49c8-a7f1-eea5cdf50239\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970116 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl7mz\" (UniqueName: \"kubernetes.io/projected/94fd5831-788d-4f63-b40b-03f64a627450-kube-api-access-fl7mz\") pod \"manila-operator-controller-manager-59578bc799-rgh45\" (UID: \"94fd5831-788d-4f63-b40b-03f64a627450\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970209 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwk9x\" (UniqueName: \"kubernetes.io/projected/a319207e-2833-4dd6-b9db-60ce94fd41af-kube-api-access-mwk9x\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970253 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gpj2\" (UniqueName: \"kubernetes.io/projected/5a21948c-c3c2-45c0-9d3e-9c6d36376990-kube-api-access-4gpj2\") pod \"nova-operator-controller-manager-57bb74c7bf-2jcr8\" (UID: \"5a21948c-c3c2-45c0-9d3e-9c6d36376990\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970478 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvztk\" (UniqueName: \"kubernetes.io/projected/91516d61-8792-4b37-aa49-d72705bae472-kube-api-access-dvztk\") pod \"horizon-operator-controller-manager-6d74794d9b-5cfjb\" (UID: \"91516d61-8792-4b37-aa49-d72705bae472\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970513 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.970561 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpqpt\" (UniqueName: \"kubernetes.io/projected/66ec2823-3d89-4b72-81cf-1bb9d6cc4c49-kube-api-access-zpqpt\") pod \"neutron-operator-controller-manager-797d478b46-sq5vt\" (UID: \"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.983783 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"
Oct 11 03:00:02 crc kubenswrapper[4953]: E1011 03:00:02.984145 4953 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.984306 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"
Oct 11 03:00:02 crc kubenswrapper[4953]: E1011 03:00:02.984807 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert podName:a319207e-2833-4dd6-b9db-60ce94fd41af nodeName:}" failed. No retries permitted until 2025-10-11 03:00:03.484774988 +0000 UTC m=+814.417862692 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert") pod "infra-operator-controller-manager-685c45897-wc8m8" (UID: "a319207e-2833-4dd6-b9db-60ce94fd41af") : secret "infra-operator-webhook-server-cert" not found
Oct 11 03:00:02 crc kubenswrapper[4953]: I1011 03:00:02.997647 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.000021 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.003688 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.004756 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.012992 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-4fmc7"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.013336 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwk9x\" (UniqueName: \"kubernetes.io/projected/a319207e-2833-4dd6-b9db-60ce94fd41af-kube-api-access-mwk9x\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.021045 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.022422 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.023977 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvztk\" (UniqueName: \"kubernetes.io/projected/91516d61-8792-4b37-aa49-d72705bae472-kube-api-access-dvztk\") pod \"horizon-operator-controller-manager-6d74794d9b-5cfjb\" (UID: \"91516d61-8792-4b37-aa49-d72705bae472\") " pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.024361 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/afa4c58f-9d37-43b6-a7f7-a9d75b68c39c-kube-api-access-5c9jp\") pod \"heat-operator-controller-manager-6d9967f8dd-zwhzq\" (UID: \"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c\") " pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.025199 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.025823 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-4gkpq"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.027574 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.029469 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psdh6\" (UniqueName: \"kubernetes.io/projected/f9afccad-9780-49c8-a7f1-eea5cdf50239-kube-api-access-psdh6\") pod \"ironic-operator-controller-manager-74cb5cbc49-b6fwb\" (UID: \"f9afccad-9780-49c8-a7f1-eea5cdf50239\") " pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.036777 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.042917 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.052144 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6"]
Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.052775 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70ea1d6-5c99-4c30-8df7-2baff910aeab" containerName="collect-profiles"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.052787 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70ea1d6-5c99-4c30-8df7-2baff910aeab" containerName="collect-profiles"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.052912 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70ea1d6-5c99-4c30-8df7-2baff910aeab" containerName="collect-profiles"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.053773 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.055687 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-9ptmm"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.058513 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.058842 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.060950 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.064054 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-mm2k4"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.067176 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.072118 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073281 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl7mz\" (UniqueName: \"kubernetes.io/projected/94fd5831-788d-4f63-b40b-03f64a627450-kube-api-access-fl7mz\") pod \"manila-operator-controller-manager-59578bc799-rgh45\" (UID: \"94fd5831-788d-4f63-b40b-03f64a627450\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073360 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gpj2\" (UniqueName: \"kubernetes.io/projected/5a21948c-c3c2-45c0-9d3e-9c6d36376990-kube-api-access-4gpj2\") pod \"nova-operator-controller-manager-57bb74c7bf-2jcr8\" (UID: \"5a21948c-c3c2-45c0-9d3e-9c6d36376990\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073384 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpqpt\" (UniqueName: \"kubernetes.io/projected/66ec2823-3d89-4b72-81cf-1bb9d6cc4c49-kube-api-access-zpqpt\") pod \"neutron-operator-controller-manager-797d478b46-sq5vt\" (UID: \"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073407 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqmrv\" (UniqueName: \"kubernetes.io/projected/360ca384-6921-424c-abed-01f6c7c0cf47-kube-api-access-jqmrv\") pod \"mariadb-operator-controller-manager-5777b4f897-dbpbb\" (UID: \"360ca384-6921-424c-abed-01f6c7c0cf47\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073441 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxls6\" (UniqueName: \"kubernetes.io/projected/a95dee0f-7e54-41f1-99d3-3df7a8554793-kube-api-access-bxls6\") pod \"keystone-operator-controller-manager-ddb98f99b-4f4lp\" (UID: \"a95dee0f-7e54-41f1-99d3-3df7a8554793\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.073489 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck4sm\" (UniqueName: \"kubernetes.io/projected/01523da6-2a81-4ce1-9724-0b2f85056158-kube-api-access-ck4sm\") pod \"octavia-operator-controller-manager-6d7c7ddf95-v8xgw\" (UID: \"01523da6-2a81-4ce1-9724-0b2f85056158\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.093781 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.094080 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.097629 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpqpt\" (UniqueName: \"kubernetes.io/projected/66ec2823-3d89-4b72-81cf-1bb9d6cc4c49-kube-api-access-zpqpt\") pod \"neutron-operator-controller-manager-797d478b46-sq5vt\" (UID: \"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49\") " pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.098069 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxls6\" (UniqueName: \"kubernetes.io/projected/a95dee0f-7e54-41f1-99d3-3df7a8554793-kube-api-access-bxls6\") pod \"keystone-operator-controller-manager-ddb98f99b-4f4lp\" (UID: \"a95dee0f-7e54-41f1-99d3-3df7a8554793\") " pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.103738 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.104862 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.107850 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-zw855"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.108860 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gpj2\" (UniqueName: \"kubernetes.io/projected/5a21948c-c3c2-45c0-9d3e-9c6d36376990-kube-api-access-4gpj2\") pod \"nova-operator-controller-manager-57bb74c7bf-2jcr8\" (UID: \"5a21948c-c3c2-45c0-9d3e-9c6d36376990\") " pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.112393 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.115978 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl7mz\" (UniqueName: \"kubernetes.io/projected/94fd5831-788d-4f63-b40b-03f64a627450-kube-api-access-fl7mz\") pod \"manila-operator-controller-manager-59578bc799-rgh45\" (UID: \"94fd5831-788d-4f63-b40b-03f64a627450\") " pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.131591 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.151837 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.156182 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.157340 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.166120 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-wjp57"
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.168520 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc"]
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.173932 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxvmg\" (UniqueName: \"kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg\") pod \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") "
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.174014 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume\") pod \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") "
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.177519 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume" (OuterVolumeSpecName: "config-volume") pod "b70ea1d6-5c99-4c30-8df7-2baff910aeab" (UID: "b70ea1d6-5c99-4c30-8df7-2baff910aeab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.180680 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.174046 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume\") pod \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\" (UID: \"b70ea1d6-5c99-4c30-8df7-2baff910aeab\") " Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181772 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181850 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvpk6\" (UniqueName: \"kubernetes.io/projected/ca2c518c-b96a-45f8-bb26-48e13c7a4a13-kube-api-access-mvpk6\") pod \"ovn-operator-controller-manager-869cc7797f-ldt8v\" (UID: \"ca2c518c-b96a-45f8-bb26-48e13c7a4a13\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181875 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqmrv\" (UniqueName: \"kubernetes.io/projected/360ca384-6921-424c-abed-01f6c7c0cf47-kube-api-access-jqmrv\") pod \"mariadb-operator-controller-manager-5777b4f897-dbpbb\" (UID: \"360ca384-6921-424c-abed-01f6c7c0cf47\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181932 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck4sm\" (UniqueName: \"kubernetes.io/projected/01523da6-2a81-4ce1-9724-0b2f85056158-kube-api-access-ck4sm\") pod \"octavia-operator-controller-manager-6d7c7ddf95-v8xgw\" (UID: \"01523da6-2a81-4ce1-9724-0b2f85056158\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181958 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnw5q\" (UniqueName: \"kubernetes.io/projected/4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0-kube-api-access-cnw5q\") pod \"placement-operator-controller-manager-664664cb68-9z7s6\" (UID: \"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.181981 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmtnd\" (UniqueName: \"kubernetes.io/projected/b98a27df-76fb-4192-9bf2-fa4c4603cbdc-kube-api-access-jmtnd\") pod \"swift-operator-controller-manager-5f4d5dfdc6-fvs5m\" (UID: \"b98a27df-76fb-4192-9bf2-fa4c4603cbdc\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.182020 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88dw6\" (UniqueName: 
\"kubernetes.io/projected/f3229387-8b22-43f8-a298-e9debe8c59eb-kube-api-access-88dw6\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.182101 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b70ea1d6-5c99-4c30-8df7-2baff910aeab-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.198911 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b70ea1d6-5c99-4c30-8df7-2baff910aeab" (UID: "b70ea1d6-5c99-4c30-8df7-2baff910aeab"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.198924 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg" (OuterVolumeSpecName: "kube-api-access-rxvmg") pod "b70ea1d6-5c99-4c30-8df7-2baff910aeab" (UID: "b70ea1d6-5c99-4c30-8df7-2baff910aeab"). InnerVolumeSpecName "kube-api-access-rxvmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.212266 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck4sm\" (UniqueName: \"kubernetes.io/projected/01523da6-2a81-4ce1-9724-0b2f85056158-kube-api-access-ck4sm\") pod \"octavia-operator-controller-manager-6d7c7ddf95-v8xgw\" (UID: \"01523da6-2a81-4ce1-9724-0b2f85056158\") " pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.213040 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqmrv\" (UniqueName: \"kubernetes.io/projected/360ca384-6921-424c-abed-01f6c7c0cf47-kube-api-access-jqmrv\") pod \"mariadb-operator-controller-manager-5777b4f897-dbpbb\" (UID: \"360ca384-6921-424c-abed-01f6c7c0cf47\") " pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.220659 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.221724 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.228813 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-dfn29" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.257523 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.261850 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.285943 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287570 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmtnd\" (UniqueName: \"kubernetes.io/projected/b98a27df-76fb-4192-9bf2-fa4c4603cbdc-kube-api-access-jmtnd\") pod \"swift-operator-controller-manager-5f4d5dfdc6-fvs5m\" (UID: \"b98a27df-76fb-4192-9bf2-fa4c4603cbdc\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287641 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88dw6\" (UniqueName: \"kubernetes.io/projected/f3229387-8b22-43f8-a298-e9debe8c59eb-kube-api-access-88dw6\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287696 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdlxc\" (UniqueName: \"kubernetes.io/projected/9ff05a66-37cc-44c4-b575-97568a1ef285-kube-api-access-vdlxc\") pod \"test-operator-controller-manager-ffcdd6c94-hc9rc\" (UID: \"9ff05a66-37cc-44c4-b575-97568a1ef285\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287730 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcmcw\" (UniqueName: \"kubernetes.io/projected/ddf675ff-4cfc-4217-8d3c-4138595de655-kube-api-access-dcmcw\") pod \"telemetry-operator-controller-manager-578874c84d-x526q\" (UID: \"ddf675ff-4cfc-4217-8d3c-4138595de655\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287802 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287849 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvpk6\" (UniqueName: \"kubernetes.io/projected/ca2c518c-b96a-45f8-bb26-48e13c7a4a13-kube-api-access-mvpk6\") pod \"ovn-operator-controller-manager-869cc7797f-ldt8v\" (UID: \"ca2c518c-b96a-45f8-bb26-48e13c7a4a13\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287893 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnw5q\" (UniqueName: \"kubernetes.io/projected/4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0-kube-api-access-cnw5q\") pod \"placement-operator-controller-manager-664664cb68-9z7s6\" (UID: \"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287928 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxvmg\" (UniqueName: 
\"kubernetes.io/projected/b70ea1d6-5c99-4c30-8df7-2baff910aeab-kube-api-access-rxvmg\") on node \"crc\" DevicePath \"\"" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.287938 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b70ea1d6-5c99-4c30-8df7-2baff910aeab-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.288196 4953 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.288235 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert podName:f3229387-8b22-43f8-a298-e9debe8c59eb nodeName:}" failed. No retries permitted until 2025-10-11 03:00:03.788221323 +0000 UTC m=+814.721308967 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" (UID: "f3229387-8b22-43f8-a298-e9debe8c59eb") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.315394 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88dw6\" (UniqueName: \"kubernetes.io/projected/f3229387-8b22-43f8-a298-e9debe8c59eb-kube-api-access-88dw6\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.319197 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvpk6\" (UniqueName: \"kubernetes.io/projected/ca2c518c-b96a-45f8-bb26-48e13c7a4a13-kube-api-access-mvpk6\") pod \"ovn-operator-controller-manager-869cc7797f-ldt8v\" (UID: \"ca2c518c-b96a-45f8-bb26-48e13c7a4a13\") " pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.331904 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnw5q\" (UniqueName: \"kubernetes.io/projected/4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0-kube-api-access-cnw5q\") pod \"placement-operator-controller-manager-664664cb68-9z7s6\" (UID: \"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0\") " pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.341767 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.343496 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.344221 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmtnd\" (UniqueName: \"kubernetes.io/projected/b98a27df-76fb-4192-9bf2-fa4c4603cbdc-kube-api-access-jmtnd\") pod \"swift-operator-controller-manager-5f4d5dfdc6-fvs5m\" (UID: \"b98a27df-76fb-4192-9bf2-fa4c4603cbdc\") " pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.348033 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.350816 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.356167 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-htzmg" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.360944 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.386782 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.389037 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdlxc\" (UniqueName: \"kubernetes.io/projected/9ff05a66-37cc-44c4-b575-97568a1ef285-kube-api-access-vdlxc\") pod \"test-operator-controller-manager-ffcdd6c94-hc9rc\" (UID: \"9ff05a66-37cc-44c4-b575-97568a1ef285\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.389089 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7tp4\" (UniqueName: \"kubernetes.io/projected/2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4-kube-api-access-g7tp4\") pod \"watcher-operator-controller-manager-646675d848-n7g7d\" (UID: \"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.389117 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcmcw\" (UniqueName: \"kubernetes.io/projected/ddf675ff-4cfc-4217-8d3c-4138595de655-kube-api-access-dcmcw\") pod \"telemetry-operator-controller-manager-578874c84d-x526q\" (UID: \"ddf675ff-4cfc-4217-8d3c-4138595de655\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.399542 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.412616 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcmcw\" (UniqueName: \"kubernetes.io/projected/ddf675ff-4cfc-4217-8d3c-4138595de655-kube-api-access-dcmcw\") pod \"telemetry-operator-controller-manager-578874c84d-x526q\" (UID: \"ddf675ff-4cfc-4217-8d3c-4138595de655\") " pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.413003 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.426428 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdlxc\" (UniqueName: \"kubernetes.io/projected/9ff05a66-37cc-44c4-b575-97568a1ef285-kube-api-access-vdlxc\") pod \"test-operator-controller-manager-ffcdd6c94-hc9rc\" (UID: \"9ff05a66-37cc-44c4-b575-97568a1ef285\") " pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.429579 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.431276 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.458947 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-s2h67" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.466394 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.480216 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.490230 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp" event={"ID":"b70ea1d6-5c99-4c30-8df7-2baff910aeab","Type":"ContainerDied","Data":"103616d70c12e0c9241a579f971908eec834770a532176dadbb24908ba2b28ea"} Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.490570 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="103616d70c12e0c9241a579f971908eec834770a532176dadbb24908ba2b28ea" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.490681 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.492836 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7tp4\" (UniqueName: \"kubernetes.io/projected/2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4-kube-api-access-g7tp4\") pod \"watcher-operator-controller-manager-646675d848-n7g7d\" (UID: \"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.492945 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.493014 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zdpm\" (UniqueName: \"kubernetes.io/projected/aa404280-7ac9-4da4-876d-a6fe37afc9af-kube-api-access-2zdpm\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.493065 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.514583 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a319207e-2833-4dd6-b9db-60ce94fd41af-cert\") pod \"infra-operator-controller-manager-685c45897-wc8m8\" (UID: \"a319207e-2833-4dd6-b9db-60ce94fd41af\") " pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.518200 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.529481 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7tp4\" (UniqueName: \"kubernetes.io/projected/2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4-kube-api-access-g7tp4\") pod \"watcher-operator-controller-manager-646675d848-n7g7d\" (UID: \"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4\") " pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.567501 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.593810 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.596354 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkwjf\" (UniqueName: \"kubernetes.io/projected/b94cb011-c2e4-4a1e-a75e-5198d4f76dbc-kube-api-access-lkwjf\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd\" (UID: \"b94cb011-c2e4-4a1e-a75e-5198d4f76dbc\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.596404 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zdpm\" (UniqueName: \"kubernetes.io/projected/aa404280-7ac9-4da4-876d-a6fe37afc9af-kube-api-access-2zdpm\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.596438 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.596556 4953 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.596617 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert podName:aa404280-7ac9-4da4-876d-a6fe37afc9af nodeName:}" failed. No retries permitted until 2025-10-11 03:00:04.096586533 +0000 UTC m=+815.029674177 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert") pod "openstack-operator-controller-manager-6fcfdfbc78-45lc4" (UID: "aa404280-7ac9-4da4-876d-a6fe37afc9af") : secret "webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.597022 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m"] Oct 11 03:00:03 crc kubenswrapper[4953]: W1011 03:00:03.607856 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86c53eae_1329_4321_86d2_80b140234b48.slice/crio-5b7a599c000b3dd42b178f69af637e1b847a3988fdc41dfe61758f14b5320e20 WatchSource:0}: Error finding container 5b7a599c000b3dd42b178f69af637e1b847a3988fdc41dfe61758f14b5320e20: Status 404 returned error can't find the container with id 5b7a599c000b3dd42b178f69af637e1b847a3988fdc41dfe61758f14b5320e20 Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.625216 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zdpm\" (UniqueName: \"kubernetes.io/projected/aa404280-7ac9-4da4-876d-a6fe37afc9af-kube-api-access-2zdpm\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:03 crc kubenswrapper[4953]: W1011 03:00:03.626743 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f923b33_4a14_4290_90f7_7e6cee41df34.slice/crio-a9f234b9b3adf7a551e503554afafde4c608a2d7454c59d9d402fc8d71d4fc4e WatchSource:0}: Error finding container a9f234b9b3adf7a551e503554afafde4c608a2d7454c59d9d402fc8d71d4fc4e: Status 404 returned error can't find the container with id a9f234b9b3adf7a551e503554afafde4c608a2d7454c59d9d402fc8d71d4fc4e Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.672237 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.698491 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkwjf\" (UniqueName: \"kubernetes.io/projected/b94cb011-c2e4-4a1e-a75e-5198d4f76dbc-kube-api-access-lkwjf\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd\" (UID: \"b94cb011-c2e4-4a1e-a75e-5198d4f76dbc\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.731015 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkwjf\" (UniqueName: \"kubernetes.io/projected/b94cb011-c2e4-4a1e-a75e-5198d4f76dbc-kube-api-access-lkwjf\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd\" (UID: \"b94cb011-c2e4-4a1e-a75e-5198d4f76dbc\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.771467 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.787881 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck"] Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.800399 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.800714 4953 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: E1011 03:00:03.800842 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert podName:f3229387-8b22-43f8-a298-e9debe8c59eb nodeName:}" failed. No retries permitted until 2025-10-11 03:00:04.800827276 +0000 UTC m=+815.733914920 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert") pod "openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" (UID: "f3229387-8b22-43f8-a298-e9debe8c59eb") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 11 03:00:03 crc kubenswrapper[4953]: I1011 03:00:03.847902 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" Oct 11 03:00:03 crc kubenswrapper[4953]: W1011 03:00:03.892197 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fdfc46d_0e5a_4b00_bc31_67beabb8c089.slice/crio-f042864098a7bbb49b1e9a8d4acb530bc5f4750b76f2eb06952f09e22c2488ef WatchSource:0}: Error finding container f042864098a7bbb49b1e9a8d4acb530bc5f4750b76f2eb06952f09e22c2488ef: Status 404 returned error can't find the container with id f042864098a7bbb49b1e9a8d4acb530bc5f4750b76f2eb06952f09e22c2488ef Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.105201 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.112122 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa404280-7ac9-4da4-876d-a6fe37afc9af-cert\") pod \"openstack-operator-controller-manager-6fcfdfbc78-45lc4\" (UID: \"aa404280-7ac9-4da4-876d-a6fe37afc9af\") " pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.177649 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-59578bc799-rgh45"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.182392 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq"] Oct 11 03:00:04 crc kubenswrapper[4953]: W1011 03:00:04.191464 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94fd5831_788d_4f63_b40b_03f64a627450.slice/crio-f743d85d74b40041b48fbb72d7df78be72bb88dbd1b76a911c3f3aef7c132a4a WatchSource:0}: Error finding container f743d85d74b40041b48fbb72d7df78be72bb88dbd1b76a911c3f3aef7c132a4a: Status 404 returned error can't find the container with id f743d85d74b40041b48fbb72d7df78be72bb88dbd1b76a911c3f3aef7c132a4a Oct 11 03:00:04 crc kubenswrapper[4953]: W1011 03:00:04.191708 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafa4c58f_9d37_43b6_a7f7_a9d75b68c39c.slice/crio-e8483aa11e91496b9b09c4326a08a269cb0e8861c2e56f60ea037fb084738c8d WatchSource:0}: Error finding container e8483aa11e91496b9b09c4326a08a269cb0e8861c2e56f60ea037fb084738c8d: Status 404 returned error can't find the container with id e8483aa11e91496b9b09c4326a08a269cb0e8861c2e56f60ea037fb084738c8d Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.220990 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb"] Oct 11 03:00:04 crc kubenswrapper[4953]: W1011 03:00:04.231064 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9afccad_9780_49c8_a7f1_eea5cdf50239.slice/crio-4f601d2cfb5dd3f7ceae0bc2ada5b449e5096188e5a13a75ffa88c2346191034 WatchSource:0}: Error finding container 
4f601d2cfb5dd3f7ceae0bc2ada5b449e5096188e5a13a75ffa88c2346191034: Status 404 returned error can't find the container with id 4f601d2cfb5dd3f7ceae0bc2ada5b449e5096188e5a13a75ffa88c2346191034 Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.251398 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb"] Oct 11 03:00:04 crc kubenswrapper[4953]: W1011 03:00:04.256179 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91516d61_8792_4b37_aa49_d72705bae472.slice/crio-1d0a7c4c6896cdb27890f59bbf95e5630d3edf96a4f8fd75a536237e00e63e9b WatchSource:0}: Error finding container 1d0a7c4c6896cdb27890f59bbf95e5630d3edf96a4f8fd75a536237e00e63e9b: Status 404 returned error can't find the container with id 1d0a7c4c6896cdb27890f59bbf95e5630d3edf96a4f8fd75a536237e00e63e9b Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.398406 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.409958 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp"] Oct 11 03:00:04 crc kubenswrapper[4953]: W1011 03:00:04.425358 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda95dee0f_7e54_41f1_99d3_3df7a8554793.slice/crio-a256ecfdcbc8158289977e245c0674940444e561b0418a0d3738c314c4124492 WatchSource:0}: Error finding container a256ecfdcbc8158289977e245c0674940444e561b0418a0d3738c314c4124492: Status 404 returned error can't find the container with id a256ecfdcbc8158289977e245c0674940444e561b0418a0d3738c314c4124492 Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.502405 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" event={"ID":"94fd5831-788d-4f63-b40b-03f64a627450","Type":"ContainerStarted","Data":"f743d85d74b40041b48fbb72d7df78be72bb88dbd1b76a911c3f3aef7c132a4a"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.504865 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" event={"ID":"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c","Type":"ContainerStarted","Data":"e8483aa11e91496b9b09c4326a08a269cb0e8861c2e56f60ea037fb084738c8d"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.506682 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" event={"ID":"a95dee0f-7e54-41f1-99d3-3df7a8554793","Type":"ContainerStarted","Data":"a256ecfdcbc8158289977e245c0674940444e561b0418a0d3738c314c4124492"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.507991 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" event={"ID":"91516d61-8792-4b37-aa49-d72705bae472","Type":"ContainerStarted","Data":"1d0a7c4c6896cdb27890f59bbf95e5630d3edf96a4f8fd75a536237e00e63e9b"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.510145 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" 
event={"ID":"6fdfc46d-0e5a-4b00-bc31-67beabb8c089","Type":"ContainerStarted","Data":"f042864098a7bbb49b1e9a8d4acb530bc5f4750b76f2eb06952f09e22c2488ef"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.511163 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" event={"ID":"0f923b33-4a14-4290-90f7-7e6cee41df34","Type":"ContainerStarted","Data":"a9f234b9b3adf7a551e503554afafde4c608a2d7454c59d9d402fc8d71d4fc4e"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.512290 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" event={"ID":"f9afccad-9780-49c8-a7f1-eea5cdf50239","Type":"ContainerStarted","Data":"4f601d2cfb5dd3f7ceae0bc2ada5b449e5096188e5a13a75ffa88c2346191034"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.514948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" event={"ID":"c7b14405-4593-4f33-99a7-d40ce066518e","Type":"ContainerStarted","Data":"046b2e1cf215a2daffea45f27aa8b3186c196d87665ec432d20f32b9c0416634"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.519160 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" event={"ID":"86c53eae-1329-4321-86d2-80b140234b48","Type":"ContainerStarted","Data":"5b7a599c000b3dd42b178f69af637e1b847a3988fdc41dfe61758f14b5320e20"} Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.613100 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.632775 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.640186 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.644499 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.649793 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.816840 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.824109 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3229387-8b22-43f8-a298-e9debe8c59eb-cert\") pod \"openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66\" (UID: \"f3229387-8b22-43f8-a298-e9debe8c59eb\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.836729 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.847631 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.853745 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.868772 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.873479 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.877475 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.881849 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.886843 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q"] Oct 11 03:00:04 crc kubenswrapper[4953]: I1011 03:00:04.912988 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4"] Oct 11 03:00:05 crc kubenswrapper[4953]: W1011 03:00:05.310822 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ff05a66_37cc_44c4_b575_97568a1ef285.slice/crio-99d3fd420b9fbbf90b39854f3254d46095ec65d70434221bc137251c11780e53 WatchSource:0}: Error finding container 99d3fd420b9fbbf90b39854f3254d46095ec65d70434221bc137251c11780e53: Status 404 returned error can't find the container with id 99d3fd420b9fbbf90b39854f3254d46095ec65d70434221bc137251c11780e53 Oct 11 03:00:05 crc kubenswrapper[4953]: W1011 03:00:05.312666 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddf675ff_4cfc_4217_8d3c_4138595de655.slice/crio-2b909b9eacca8824b7585823fc1e5865c3021c872c171c98590191a0762336e6 WatchSource:0}: Error finding container 2b909b9eacca8824b7585823fc1e5865c3021c872c171c98590191a0762336e6: Status 404 returned error can't find the container with id 2b909b9eacca8824b7585823fc1e5865c3021c872c171c98590191a0762336e6 Oct 11 03:00:05 crc kubenswrapper[4953]: E1011 03:00:05.315091 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dcmcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-578874c84d-x526q_openstack-operators(ddf675ff-4cfc-4217-8d3c-4138595de655): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 03:00:05 crc kubenswrapper[4953]: W1011 03:00:05.315681 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb94cb011_c2e4_4a1e_a75e_5198d4f76dbc.slice/crio-80cbea184d0b602657d019429c1b76a322f2bded8db454752316e084d8562289 WatchSource:0}: Error finding container 80cbea184d0b602657d019429c1b76a322f2bded8db454752316e084d8562289: Status 404 returned error can't find the container with id 80cbea184d0b602657d019429c1b76a322f2bded8db454752316e084d8562289 Oct 11 03:00:05 crc kubenswrapper[4953]: W1011 03:00:05.320376 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda319207e_2833_4dd6_b9db_60ce94fd41af.slice/crio-35776a168a0f6fda9eee2cb6b57a8e588835ff8bcc3db4d1e02102fb56ac8c4a WatchSource:0}: Error finding container 35776a168a0f6fda9eee2cb6b57a8e588835ff8bcc3db4d1e02102fb56ac8c4a: Status 404 returned error can't find the container with id 35776a168a0f6fda9eee2cb6b57a8e588835ff8bcc3db4d1e02102fb56ac8c4a Oct 11 03:00:05 crc kubenswrapper[4953]: E1011 03:00:05.325519 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lkwjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd_openstack-operators(b94cb011-c2e4-4a1e-a75e-5198d4f76dbc): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 03:00:05 crc kubenswrapper[4953]: E1011 03:00:05.326785 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" podUID="b94cb011-c2e4-4a1e-a75e-5198d4f76dbc" Oct 11 03:00:05 crc kubenswrapper[4953]: E1011 03:00:05.328081 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:52516b81f63966c7226180af634ff415f31174b382458f324b9ac0602eacc6a1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mwk9x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-685c45897-wc8m8_openstack-operators(a319207e-2833-4dd6-b9db-60ce94fd41af): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.527571 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" event={"ID":"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0","Type":"ContainerStarted","Data":"220acf5ba5397c6ec2d1b052bcf7d57e507c11709514ab25a64ca70e647383fb"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.529173 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" event={"ID":"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49","Type":"ContainerStarted","Data":"66cc176e541ddbe6bef6ee119b27de995732ba1b7c855dc5e84a883c9a31f503"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.530812 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" event={"ID":"9ff05a66-37cc-44c4-b575-97568a1ef285","Type":"ContainerStarted","Data":"99d3fd420b9fbbf90b39854f3254d46095ec65d70434221bc137251c11780e53"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.532088 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" event={"ID":"5a21948c-c3c2-45c0-9d3e-9c6d36376990","Type":"ContainerStarted","Data":"d2cb4458b4d03b4f0283275a07707fd50874796f0b397809c02640b001c3f4f5"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.534277 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" event={"ID":"b94cb011-c2e4-4a1e-a75e-5198d4f76dbc","Type":"ContainerStarted","Data":"80cbea184d0b602657d019429c1b76a322f2bded8db454752316e084d8562289"} Oct 11 03:00:05 crc 
kubenswrapper[4953]: I1011 03:00:05.535642 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" event={"ID":"b98a27df-76fb-4192-9bf2-fa4c4603cbdc","Type":"ContainerStarted","Data":"4cc03d4689b99e6398cec53ff9f5347f0021211e1817fb80783dfa602e1f2dd6"} Oct 11 03:00:05 crc kubenswrapper[4953]: E1011 03:00:05.536435 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" podUID="b94cb011-c2e4-4a1e-a75e-5198d4f76dbc" Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.537843 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" event={"ID":"360ca384-6921-424c-abed-01f6c7c0cf47","Type":"ContainerStarted","Data":"1bd61ed372179eca79e55b1856a2749a87e25ab730e878c9330dab4abd755844"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.539346 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" event={"ID":"01523da6-2a81-4ce1-9724-0b2f85056158","Type":"ContainerStarted","Data":"7527afdbe286a86afa16bde3303e6bd1865d733ce08509115af21f620483a25f"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.541642 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" event={"ID":"ca2c518c-b96a-45f8-bb26-48e13c7a4a13","Type":"ContainerStarted","Data":"ed426f2bf679970bf2244c7e2d9739a267c8addea1a2ebb5fa54d233ad784477"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.542922 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" event={"ID":"ddf675ff-4cfc-4217-8d3c-4138595de655","Type":"ContainerStarted","Data":"2b909b9eacca8824b7585823fc1e5865c3021c872c171c98590191a0762336e6"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.544486 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" event={"ID":"aa404280-7ac9-4da4-876d-a6fe37afc9af","Type":"ContainerStarted","Data":"9b37b12f4fd3c8bd7fdcfb21929e071d0e6abe41749ec5f7d9bdef97b6deddb4"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.546216 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" event={"ID":"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4","Type":"ContainerStarted","Data":"edd2bfc55ce21509422ac0c34c197135a7d7dae093262c9931e68e9e389611b9"} Oct 11 03:00:05 crc kubenswrapper[4953]: I1011 03:00:05.549662 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" event={"ID":"a319207e-2833-4dd6-b9db-60ce94fd41af","Type":"ContainerStarted","Data":"35776a168a0f6fda9eee2cb6b57a8e588835ff8bcc3db4d1e02102fb56ac8c4a"} Oct 11 03:00:06 crc kubenswrapper[4953]: E1011 03:00:06.564651 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" podUID="b94cb011-c2e4-4a1e-a75e-5198d4f76dbc" Oct 11 03:00:15 crc kubenswrapper[4953]: I1011 03:00:15.907865 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66"] Oct 11 03:00:15 crc kubenswrapper[4953]: E1011 03:00:15.927331 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" podUID="ddf675ff-4cfc-4217-8d3c-4138595de655" Oct 11 03:00:15 crc kubenswrapper[4953]: E1011 03:00:15.974691 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" podUID="a319207e-2833-4dd6-b9db-60ce94fd41af" Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.637912 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" event={"ID":"0f923b33-4a14-4290-90f7-7e6cee41df34","Type":"ContainerStarted","Data":"48c5fd53217ec1e3b827db9347f9b34f73923f1b6844e3b9a36c6aaa2ba6eb25"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.639950 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" event={"ID":"aa404280-7ac9-4da4-876d-a6fe37afc9af","Type":"ContainerStarted","Data":"2560ca0c583946e56631a84dd7931ac6ea2bab25198c6b1fe449f29752265909"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.641999 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" event={"ID":"94fd5831-788d-4f63-b40b-03f64a627450","Type":"ContainerStarted","Data":"c1c27476a0081802c9c6b988f43f309ca68610408301b5d0559aa1e6ce2e01bd"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.644068 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" event={"ID":"f9afccad-9780-49c8-a7f1-eea5cdf50239","Type":"ContainerStarted","Data":"c7351183377b266060dca933e9dffae30616aa191a2e236272a1ba28f6c66670"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.645504 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" event={"ID":"a319207e-2833-4dd6-b9db-60ce94fd41af","Type":"ContainerStarted","Data":"e28ff0cd7ab40064a5e2a8a6ef270cf4fae5a48f3d585c3cd07ae28f9cf5ad99"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.650411 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" event={"ID":"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0","Type":"ContainerStarted","Data":"6ddd1bacae769deaf6d3d4f98ca721c22faf5563782d40ec24582260225dcb99"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.654388 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" 
event={"ID":"ddf675ff-4cfc-4217-8d3c-4138595de655","Type":"ContainerStarted","Data":"59851a069ae00710f32cb7eac7c5797f2c45ac44b4aa4f96529a2dc6891e9c34"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.674375 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" event={"ID":"86c53eae-1329-4321-86d2-80b140234b48","Type":"ContainerStarted","Data":"858e50140b830a46285cfcaaca0eab0d72c9e594caf1c236636c8a33851c4599"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.675791 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" event={"ID":"f3229387-8b22-43f8-a298-e9debe8c59eb","Type":"ContainerStarted","Data":"81f82d813ca92edef7f3c0d5cba95c4e435fa3573d8083eab99e3877f75ad661"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.676910 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" event={"ID":"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49","Type":"ContainerStarted","Data":"c11bccb384abd39a8c5fd59daa4b34a83a229aaeef6bb3ecbfa493103872394c"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.679326 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" event={"ID":"6fdfc46d-0e5a-4b00-bc31-67beabb8c089","Type":"ContainerStarted","Data":"433d3508a27808bdee60afb3b088b07f3ea718c01342e5e2a37b89f02300b88f"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.682007 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" event={"ID":"c7b14405-4593-4f33-99a7-d40ce066518e","Type":"ContainerStarted","Data":"9f3f309f9b73afdaf8aab58e91aece3631fe5ef60940f1cf9115dcd3efeded7e"} Oct 11 03:00:16 crc kubenswrapper[4953]: I1011 03:00:16.685847 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" event={"ID":"91516d61-8792-4b37-aa49-d72705bae472","Type":"ContainerStarted","Data":"f9e2b9047b0863d46cce3e176099420a7c2c382084c472b9e52dee851297c5c4"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.699527 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" event={"ID":"9ff05a66-37cc-44c4-b575-97568a1ef285","Type":"ContainerStarted","Data":"4f6318eae3713b85986b9c6a72efd5fac33617300f745976baa32edfde7c8bc9"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.700884 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" event={"ID":"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c","Type":"ContainerStarted","Data":"1040a0486c9ca9a041456233d797c7dc76f1d8f5d0cb1af743f6202f4b7d1340"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.709993 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" event={"ID":"0f923b33-4a14-4290-90f7-7e6cee41df34","Type":"ContainerStarted","Data":"af1fedf961cdaa9132dcf77d05f3eef504c86bedca95ad030247078c5aee66b5"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.710337 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 
03:00:17.717615 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" event={"ID":"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4","Type":"ContainerStarted","Data":"628f0b924a30ba10325e35a8e987edd734240163a67a1191c6db8ae26a5fe1fb"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.719386 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" event={"ID":"01523da6-2a81-4ce1-9724-0b2f85056158","Type":"ContainerStarted","Data":"ff76a10b04b6067202f64c07d54c5ba85695ffb8d4c39b43b00d2f2164821136"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.721031 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" event={"ID":"5a21948c-c3c2-45c0-9d3e-9c6d36376990","Type":"ContainerStarted","Data":"60c8d59178161a6926dd04bf80b54b550339efffe02f17489c117630670758aa"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.727238 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" event={"ID":"360ca384-6921-424c-abed-01f6c7c0cf47","Type":"ContainerStarted","Data":"4dc9b931c7782fcd4716d9b598758bb4660ddeab8cddc3c65b1b51f7ed4f163e"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.729627 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" event={"ID":"ca2c518c-b96a-45f8-bb26-48e13c7a4a13","Type":"ContainerStarted","Data":"43f3d59e19068576a545b721070b65981ccbdd2652ff0991ba387dd726614471"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.731932 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" podStartSLOduration=8.89092735 podStartE2EDuration="15.731917113s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:03.631858454 +0000 UTC m=+814.564946098" lastFinishedPulling="2025-10-11 03:00:10.472848217 +0000 UTC m=+821.405935861" observedRunningTime="2025-10-11 03:00:17.729226755 +0000 UTC m=+828.662314399" watchObservedRunningTime="2025-10-11 03:00:17.731917113 +0000 UTC m=+828.665004757" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.749645 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" event={"ID":"a95dee0f-7e54-41f1-99d3-3df7a8554793","Type":"ContainerStarted","Data":"2dabbd13e1195d3dd5ee81406999a3fcad0810028802319e4b8fd20148bd9af1"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.752449 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" event={"ID":"aa404280-7ac9-4da4-876d-a6fe37afc9af","Type":"ContainerStarted","Data":"05cabb0f624f73ab6634b54c493a2ec503df4dc5e55be596361d206326343c25"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.752703 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.786940 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" 
event={"ID":"91516d61-8792-4b37-aa49-d72705bae472","Type":"ContainerStarted","Data":"2a3eef81d0013f22d09c6c941d1d72e0df3e5d79e9f007670837623f41adbcd4"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.787081 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.791782 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" event={"ID":"b98a27df-76fb-4192-9bf2-fa4c4603cbdc","Type":"ContainerStarted","Data":"0e66308441e9b95f09fe198b8051a69ff7868a9a9074788881a44338b577accf"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.794394 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" podStartSLOduration=14.794380908 podStartE2EDuration="14.794380908s" podCreationTimestamp="2025-10-11 03:00:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:00:17.792878569 +0000 UTC m=+828.725966223" watchObservedRunningTime="2025-10-11 03:00:17.794380908 +0000 UTC m=+828.727468552" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.823953 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" podStartSLOduration=4.626790684 podStartE2EDuration="15.823936622s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:04.260393416 +0000 UTC m=+815.193481060" lastFinishedPulling="2025-10-11 03:00:15.457539324 +0000 UTC m=+826.390626998" observedRunningTime="2025-10-11 03:00:17.81876584 +0000 UTC m=+828.751853484" watchObservedRunningTime="2025-10-11 03:00:17.823936622 +0000 UTC m=+828.757024266" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833194 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" event={"ID":"86c53eae-1329-4321-86d2-80b140234b48","Type":"ContainerStarted","Data":"63e73eaf2eee5bb3f0cc1819656acf23071778d9dbd988bdaf62156550d3b783"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833241 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" event={"ID":"94fd5831-788d-4f63-b40b-03f64a627450","Type":"ContainerStarted","Data":"f7ff1d7a93168a242c49a5c79b0a58c6f818c4358a87c89e542033746bda5cb9"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833265 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833279 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833289 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" event={"ID":"f9afccad-9780-49c8-a7f1-eea5cdf50239","Type":"ContainerStarted","Data":"97af21b6d96188d68e35589062487af8e026e2d19c574b4cfa8b374346bfd854"} Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.833301 4953 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.846488 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" podStartSLOduration=9.370037758 podStartE2EDuration="15.846469547s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:03.61135581 +0000 UTC m=+814.544443454" lastFinishedPulling="2025-10-11 03:00:10.087787599 +0000 UTC m=+821.020875243" observedRunningTime="2025-10-11 03:00:17.836975975 +0000 UTC m=+828.770063619" watchObservedRunningTime="2025-10-11 03:00:17.846469547 +0000 UTC m=+828.779557181" Oct 11 03:00:17 crc kubenswrapper[4953]: I1011 03:00:17.890866 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" podStartSLOduration=4.429883928 podStartE2EDuration="15.890834549s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:04.193309054 +0000 UTC m=+815.126396698" lastFinishedPulling="2025-10-11 03:00:15.654259665 +0000 UTC m=+826.587347319" observedRunningTime="2025-10-11 03:00:17.859807347 +0000 UTC m=+828.792895011" watchObservedRunningTime="2025-10-11 03:00:17.890834549 +0000 UTC m=+828.823922193" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.846217 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" event={"ID":"afa4c58f-9d37-43b6-a7f7-a9d75b68c39c","Type":"ContainerStarted","Data":"c05dd4cdd9864c8c8acb92504fa7a037de8a9e7c6f5d25a62062443db7a880a9"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.846308 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.849981 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" event={"ID":"a95dee0f-7e54-41f1-99d3-3df7a8554793","Type":"ContainerStarted","Data":"2a9b3a869d147b4ec0fd388a42fa55efdd88daf4eb9ed813f46a11875540abd7"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.850073 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.861634 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" podStartSLOduration=5.525312867 podStartE2EDuration="16.861620777s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:04.236501896 +0000 UTC m=+815.169589540" lastFinishedPulling="2025-10-11 03:00:15.572809766 +0000 UTC m=+826.505897450" observedRunningTime="2025-10-11 03:00:17.896446853 +0000 UTC m=+828.829534497" watchObservedRunningTime="2025-10-11 03:00:18.861620777 +0000 UTC m=+829.794708421" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.866086 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" podStartSLOduration=5.408779203 podStartE2EDuration="16.866077061s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 
03:00:04.198047215 +0000 UTC m=+815.131134859" lastFinishedPulling="2025-10-11 03:00:15.655345063 +0000 UTC m=+826.588432717" observedRunningTime="2025-10-11 03:00:18.859211996 +0000 UTC m=+829.792299630" watchObservedRunningTime="2025-10-11 03:00:18.866077061 +0000 UTC m=+829.799164705" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.866761 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" event={"ID":"2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4","Type":"ContainerStarted","Data":"9b7f48ae807bda311318e782af72b17ff23bed1111d6e9e3b0cbbf4303e1cf53"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.866878 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.901028 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" event={"ID":"c7b14405-4593-4f33-99a7-d40ce066518e","Type":"ContainerStarted","Data":"007781dc73899c9612fcd05c7f364c4dcadef17bbb567958355bf1c8f27f9a98"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.901085 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.905948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" event={"ID":"01523da6-2a81-4ce1-9724-0b2f85056158","Type":"ContainerStarted","Data":"673dee873e040a3a3362d716ad6aacc685a5c679be52eb70e853afb69377c369"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.906254 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" podStartSLOduration=5.678460836 podStartE2EDuration="16.904335197s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:04.428535018 +0000 UTC m=+815.361622672" lastFinishedPulling="2025-10-11 03:00:15.654409359 +0000 UTC m=+826.587497033" observedRunningTime="2025-10-11 03:00:18.888993966 +0000 UTC m=+829.822081610" watchObservedRunningTime="2025-10-11 03:00:18.904335197 +0000 UTC m=+829.837422851" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.906543 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.919790 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" podStartSLOduration=5.473166779 podStartE2EDuration="15.919771551s" podCreationTimestamp="2025-10-11 03:00:03 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.310215481 +0000 UTC m=+816.243303125" lastFinishedPulling="2025-10-11 03:00:15.756820253 +0000 UTC m=+826.689907897" observedRunningTime="2025-10-11 03:00:18.916783875 +0000 UTC m=+829.849871529" watchObservedRunningTime="2025-10-11 03:00:18.919771551 +0000 UTC m=+829.852859195" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.922410 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" 
event={"ID":"360ca384-6921-424c-abed-01f6c7c0cf47","Type":"ContainerStarted","Data":"6d94120e097786e5e8e974944a635c5d65e9e20bb7151d5f6872aa1bcc5f5324"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.923313 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.928707 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" event={"ID":"4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0","Type":"ContainerStarted","Data":"49cb05c438f5871cbd369d7ab03b767da8532d9bb8721cba546fbb7a4818ca07"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.929108 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.933882 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" podStartSLOduration=5.176355891 podStartE2EDuration="16.933870041s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:03.896750855 +0000 UTC m=+814.829838499" lastFinishedPulling="2025-10-11 03:00:15.654264995 +0000 UTC m=+826.587352649" observedRunningTime="2025-10-11 03:00:18.930093145 +0000 UTC m=+829.863180789" watchObservedRunningTime="2025-10-11 03:00:18.933870041 +0000 UTC m=+829.866957685" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.940247 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" event={"ID":"ca2c518c-b96a-45f8-bb26-48e13c7a4a13","Type":"ContainerStarted","Data":"9edb924f3f1e53f8a501c17be4666a77b63efb98d57f652114ecf821f1fca39a"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.940288 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.943616 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" event={"ID":"5a21948c-c3c2-45c0-9d3e-9c6d36376990","Type":"ContainerStarted","Data":"959f7f0277370f42a9bd774aa68ec6f01da179c95b4ede5941eecba0083ad07d"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.943993 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.945909 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" event={"ID":"b98a27df-76fb-4192-9bf2-fa4c4603cbdc","Type":"ContainerStarted","Data":"fb81628b686c05b042c4b37512167f05b9ee83f8f0fbde33782a339c8b64e73c"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.946243 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.950539 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" 
event={"ID":"6fdfc46d-0e5a-4b00-bc31-67beabb8c089","Type":"ContainerStarted","Data":"7ac137163f82666c1e1790ee505a7932f10c8a5ea41b8e31a070b6720874b4e0"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.950879 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.954473 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" event={"ID":"66ec2823-3d89-4b72-81cf-1bb9d6cc4c49","Type":"ContainerStarted","Data":"79b8db56a51652cd0ae5d65555fded8ee9ce9b19c5fc012d34c802407c71c660"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.955366 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.955757 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" podStartSLOduration=6.607067607 podStartE2EDuration="16.95574698s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.307094521 +0000 UTC m=+816.240182165" lastFinishedPulling="2025-10-11 03:00:15.655773884 +0000 UTC m=+826.588861538" observedRunningTime="2025-10-11 03:00:18.948661829 +0000 UTC m=+829.881749483" watchObservedRunningTime="2025-10-11 03:00:18.95574698 +0000 UTC m=+829.888834624" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.959907 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" event={"ID":"9ff05a66-37cc-44c4-b575-97568a1ef285","Type":"ContainerStarted","Data":"2b4cd7497a34418678e54750325eedc347658c661c3f5e1975720e81fdf11319"} Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.959935 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:18 crc kubenswrapper[4953]: I1011 03:00:18.980540 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" podStartSLOduration=6.514351311 podStartE2EDuration="16.980524932s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.309725679 +0000 UTC m=+816.242813323" lastFinishedPulling="2025-10-11 03:00:15.7758993 +0000 UTC m=+826.708986944" observedRunningTime="2025-10-11 03:00:18.972191979 +0000 UTC m=+829.905279613" watchObservedRunningTime="2025-10-11 03:00:18.980524932 +0000 UTC m=+829.913612576" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.007560 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" podStartSLOduration=6.540987101 podStartE2EDuration="17.007545482s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.309924714 +0000 UTC m=+816.243012358" lastFinishedPulling="2025-10-11 03:00:15.776483075 +0000 UTC m=+826.709570739" observedRunningTime="2025-10-11 03:00:18.991598105 +0000 UTC m=+829.924685749" watchObservedRunningTime="2025-10-11 03:00:19.007545482 +0000 UTC m=+829.940633126" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.028827 4953 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" podStartSLOduration=6.682198674 podStartE2EDuration="17.028809894s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.310134449 +0000 UTC m=+816.243222093" lastFinishedPulling="2025-10-11 03:00:15.656745659 +0000 UTC m=+826.589833313" observedRunningTime="2025-10-11 03:00:19.02315685 +0000 UTC m=+829.956244504" watchObservedRunningTime="2025-10-11 03:00:19.028809894 +0000 UTC m=+829.961897538" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.029178 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" podStartSLOduration=6.5637122009999995 podStartE2EDuration="17.029174464s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.309999286 +0000 UTC m=+816.243086920" lastFinishedPulling="2025-10-11 03:00:15.775461529 +0000 UTC m=+826.708549183" observedRunningTime="2025-10-11 03:00:19.010125998 +0000 UTC m=+829.943213642" watchObservedRunningTime="2025-10-11 03:00:19.029174464 +0000 UTC m=+829.962262098" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.040747 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" podStartSLOduration=6.593562842 podStartE2EDuration="17.040735469s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.309674477 +0000 UTC m=+816.242762111" lastFinishedPulling="2025-10-11 03:00:15.756847074 +0000 UTC m=+826.689934738" observedRunningTime="2025-10-11 03:00:19.037796064 +0000 UTC m=+829.970883728" watchObservedRunningTime="2025-10-11 03:00:19.040735469 +0000 UTC m=+829.973823113" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.062702 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" podStartSLOduration=5.692274172 podStartE2EDuration="16.062687349s" podCreationTimestamp="2025-10-11 03:00:03 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.313412673 +0000 UTC m=+816.246500317" lastFinishedPulling="2025-10-11 03:00:15.68382584 +0000 UTC m=+826.616913494" observedRunningTime="2025-10-11 03:00:19.055423054 +0000 UTC m=+829.988510698" watchObservedRunningTime="2025-10-11 03:00:19.062687349 +0000 UTC m=+829.995774993" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.077907 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" podStartSLOduration=6.590282139 podStartE2EDuration="17.077889587s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.288814645 +0000 UTC m=+816.221902289" lastFinishedPulling="2025-10-11 03:00:15.776422083 +0000 UTC m=+826.709509737" observedRunningTime="2025-10-11 03:00:19.072186812 +0000 UTC m=+830.005274466" watchObservedRunningTime="2025-10-11 03:00:19.077889587 +0000 UTC m=+830.010977221" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.093326 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" podStartSLOduration=8.804969776 podStartE2EDuration="17.093310001s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:03.896986191 
+0000 UTC m=+814.830073835" lastFinishedPulling="2025-10-11 03:00:12.185326406 +0000 UTC m=+823.118414060" observedRunningTime="2025-10-11 03:00:19.088920099 +0000 UTC m=+830.022007743" watchObservedRunningTime="2025-10-11 03:00:19.093310001 +0000 UTC m=+830.026397645" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.290554 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"] Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.297802 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.310148 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"] Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.450515 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfm6t\" (UniqueName: \"kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.450568 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.450687 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.552045 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.552116 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfm6t\" (UniqueName: \"kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.552196 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.553403 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " 
pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.553491 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.586632 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfm6t\" (UniqueName: \"kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t\") pod \"redhat-operators-qvhk5\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") " pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.632960 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qvhk5" Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.889340 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"] Oct 11 03:00:19 crc kubenswrapper[4953]: I1011 03:00:19.972163 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerStarted","Data":"6aa3ccc63e53239937f9d9399d8e8ee3f7076d0943c573e4cea90544a790bcd5"} Oct 11 03:00:22 crc kubenswrapper[4953]: I1011 03:00:22.958511 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c565d56b4-xjd8m" Oct 11 03:00:22 crc kubenswrapper[4953]: I1011 03:00:22.989154 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-687df44cdb-tmbxg" Oct 11 03:00:22 crc kubenswrapper[4953]: I1011 03:00:22.989576 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f84fcdbb-mndql" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.016677 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7bb46cd7d-sg8ck" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.036867 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-6d9967f8dd-zwhzq" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.066515 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d74794d9b-5cfjb" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.099276 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-74cb5cbc49-b6fwb" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.134837 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-ddb98f99b-4f4lp" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.156434 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-59578bc799-rgh45" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.183969 4953 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-797d478b46-sq5vt" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.260113 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-57bb74c7bf-2jcr8" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.289696 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5777b4f897-dbpbb" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.345896 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6d7c7ddf95-v8xgw" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.369650 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-869cc7797f-ldt8v" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.403109 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-664664cb68-9z7s6" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.415353 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f4d5dfdc6-fvs5m" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.522534 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-ffcdd6c94-hc9rc" Oct 11 03:00:23 crc kubenswrapper[4953]: I1011 03:00:23.570324 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-646675d848-n7g7d" Oct 11 03:00:24 crc kubenswrapper[4953]: I1011 03:00:24.409925 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6fcfdfbc78-45lc4" Oct 11 03:00:26 crc kubenswrapper[4953]: I1011 03:00:26.043629 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerStarted","Data":"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"} Oct 11 03:00:27 crc kubenswrapper[4953]: I1011 03:00:27.055037 4953 generic.go:334] "Generic (PLEG): container finished" podID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerID="be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163" exitCode=0 Oct 11 03:00:27 crc kubenswrapper[4953]: I1011 03:00:27.055090 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerDied","Data":"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"} Oct 11 03:00:28 crc kubenswrapper[4953]: E1011 03:00:28.398981 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:52516b81f63966c7226180af634ff415f31174b382458f324b9ac0602eacc6a1" Oct 11 03:00:28 crc kubenswrapper[4953]: E1011 03:00:28.399175 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:52516b81f63966c7226180af634ff415f31174b382458f324b9ac0602eacc6a1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mwk9x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-685c45897-wc8m8_openstack-operators(a319207e-2833-4dd6-b9db-60ce94fd41af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 03:00:28 crc kubenswrapper[4953]: E1011 03:00:28.400406 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" podUID="a319207e-2833-4dd6-b9db-60ce94fd41af" Oct 11 03:00:29 crc kubenswrapper[4953]: E1011 03:00:29.084803 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:52516b81f63966c7226180af634ff415f31174b382458f324b9ac0602eacc6a1\\\"\"" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" podUID="a319207e-2833-4dd6-b9db-60ce94fd41af" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.810850 4953 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-9dntb"] Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.813392 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.836489 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flgg6\" (UniqueName: \"kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.836538 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.836560 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.842575 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9dntb"] Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.937897 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flgg6\" (UniqueName: \"kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.938359 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.938488 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.938892 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.939053 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities\") pod \"certified-operators-9dntb\" (UID: 
\"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:29 crc kubenswrapper[4953]: I1011 03:00:29.960282 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flgg6\" (UniqueName: \"kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6\") pod \"certified-operators-9dntb\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") " pod="openshift-marketplace/certified-operators-9dntb" Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.096188 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" event={"ID":"ddf675ff-4cfc-4217-8d3c-4138595de655","Type":"ContainerStarted","Data":"4e430c465c83a08ecf045e05c53790936250fd50c19c32b0b4b814277cf3516a"} Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.096328 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.097867 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" event={"ID":"b94cb011-c2e4-4a1e-a75e-5198d4f76dbc","Type":"ContainerStarted","Data":"eb61e57a5c5e363982f99cc4726c5e2d670c477ba575a2251d2d57f96b5a5897"} Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.099800 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerStarted","Data":"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"} Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.104166 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" event={"ID":"f3229387-8b22-43f8-a298-e9debe8c59eb","Type":"ContainerStarted","Data":"e4bb19bb87d1116fae46b69e220f4fd3ceb9d13dbdd2253c05bc6d611f929e9d"} Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.104210 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" event={"ID":"f3229387-8b22-43f8-a298-e9debe8c59eb","Type":"ContainerStarted","Data":"8c72c80ac0059e7e5d8b22f801995c7a4131a5a7c27deb34ab074d5198b1605e"} Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.104799 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.121225 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q" podStartSLOduration=4.233665821 podStartE2EDuration="28.121196137s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.314978263 +0000 UTC m=+816.248065907" lastFinishedPulling="2025-10-11 03:00:29.202508559 +0000 UTC m=+840.135596223" observedRunningTime="2025-10-11 03:00:30.118132389 +0000 UTC m=+841.051220043" watchObservedRunningTime="2025-10-11 03:00:30.121196137 +0000 UTC m=+841.054283801" Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.156405 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd" 
podStartSLOduration=3.22424148 podStartE2EDuration="27.156391275s" podCreationTimestamp="2025-10-11 03:00:03 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.325366248 +0000 UTC m=+816.258453892" lastFinishedPulling="2025-10-11 03:00:29.257516043 +0000 UTC m=+840.190603687" observedRunningTime="2025-10-11 03:00:30.153204124 +0000 UTC m=+841.086291778" watchObservedRunningTime="2025-10-11 03:00:30.156391275 +0000 UTC m=+841.089478919"
Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.162818 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.210483 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66" podStartSLOduration=14.911486734 podStartE2EDuration="28.210458605s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:15.956409127 +0000 UTC m=+826.889496771" lastFinishedPulling="2025-10-11 03:00:29.255380998 +0000 UTC m=+840.188468642" observedRunningTime="2025-10-11 03:00:30.207152521 +0000 UTC m=+841.140240175" watchObservedRunningTime="2025-10-11 03:00:30.210458605 +0000 UTC m=+841.143546249"
Oct 11 03:00:30 crc kubenswrapper[4953]: I1011 03:00:30.708220 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9dntb"]
Oct 11 03:00:30 crc kubenswrapper[4953]: W1011 03:00:30.722875 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1da46f32_93a2_4bcd_927b_71c7e63e33a3.slice/crio-e03c215a3e867b273ea8e08b25412579c14ac486a9a4ac18834c9e2157415120 WatchSource:0}: Error finding container e03c215a3e867b273ea8e08b25412579c14ac486a9a4ac18834c9e2157415120: Status 404 returned error can't find the container with id e03c215a3e867b273ea8e08b25412579c14ac486a9a4ac18834c9e2157415120
Oct 11 03:00:31 crc kubenswrapper[4953]: I1011 03:00:31.116651 4953 generic.go:334] "Generic (PLEG): container finished" podID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerID="77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047" exitCode=0
Oct 11 03:00:31 crc kubenswrapper[4953]: I1011 03:00:31.116852 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerDied","Data":"77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047"}
Oct 11 03:00:31 crc kubenswrapper[4953]: I1011 03:00:31.117104 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerStarted","Data":"e03c215a3e867b273ea8e08b25412579c14ac486a9a4ac18834c9e2157415120"}
Oct 11 03:00:31 crc kubenswrapper[4953]: I1011 03:00:31.126833 4953 generic.go:334] "Generic (PLEG): container finished" podID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerID="61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112" exitCode=0
Oct 11 03:00:31 crc kubenswrapper[4953]: I1011 03:00:31.127012 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerDied","Data":"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"}
Oct 11 03:00:32 crc kubenswrapper[4953]: I1011 03:00:32.139806 4953 generic.go:334] "Generic (PLEG): container finished" podID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerID="a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb" exitCode=0
Oct 11 03:00:32 crc kubenswrapper[4953]: I1011 03:00:32.140003 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerDied","Data":"a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb"}
Oct 11 03:00:32 crc kubenswrapper[4953]: I1011 03:00:32.142643 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerStarted","Data":"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"}
Oct 11 03:00:32 crc kubenswrapper[4953]: I1011 03:00:32.178836 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qvhk5" podStartSLOduration=9.253027847 podStartE2EDuration="13.178813454s" podCreationTimestamp="2025-10-11 03:00:19 +0000 UTC" firstStartedPulling="2025-10-11 03:00:27.696238135 +0000 UTC m=+838.629325779" lastFinishedPulling="2025-10-11 03:00:31.622023722 +0000 UTC m=+842.555111386" observedRunningTime="2025-10-11 03:00:32.17203049 +0000 UTC m=+843.105118144" watchObservedRunningTime="2025-10-11 03:00:32.178813454 +0000 UTC m=+843.111901598"
Oct 11 03:00:33 crc kubenswrapper[4953]: I1011 03:00:33.154081 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerStarted","Data":"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"}
Oct 11 03:00:33 crc kubenswrapper[4953]: I1011 03:00:33.171863 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9dntb" podStartSLOduration=2.695599001 podStartE2EDuration="4.171845799s" podCreationTimestamp="2025-10-11 03:00:29 +0000 UTC" firstStartedPulling="2025-10-11 03:00:31.120138323 +0000 UTC m=+842.053226017" lastFinishedPulling="2025-10-11 03:00:32.596385171 +0000 UTC m=+843.529472815" observedRunningTime="2025-10-11 03:00:33.168988246 +0000 UTC m=+844.102075900" watchObservedRunningTime="2025-10-11 03:00:33.171845799 +0000 UTC m=+844.104933443"
Oct 11 03:00:34 crc kubenswrapper[4953]: I1011 03:00:34.893915 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66"
Oct 11 03:00:39 crc kubenswrapper[4953]: I1011 03:00:39.633928 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:39 crc kubenswrapper[4953]: I1011 03:00:39.634329 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:39 crc kubenswrapper[4953]: I1011 03:00:39.686583 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:40 crc kubenswrapper[4953]: I1011 03:00:40.163841 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:40 crc kubenswrapper[4953]: I1011 03:00:40.163982 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:40 crc kubenswrapper[4953]: I1011 03:00:40.211372 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:40 crc kubenswrapper[4953]: I1011 03:00:40.278289 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:40 crc kubenswrapper[4953]: I1011 03:00:40.285846 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:40 crc kubenswrapper[4953]: E1011 03:00:40.800727 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:52516b81f63966c7226180af634ff415f31174b382458f324b9ac0602eacc6a1\\\"\"" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" podUID="a319207e-2833-4dd6-b9db-60ce94fd41af"
Oct 11 03:00:41 crc kubenswrapper[4953]: I1011 03:00:41.532483 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"]
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.245245 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qvhk5" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="registry-server" containerID="cri-o://b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8" gracePeriod=2
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.535300 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9dntb"]
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.703534 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.853967 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities\") pod \"83cdd1a8-da96-4c7d-882d-067d1cb67198\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") "
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.854102 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content\") pod \"83cdd1a8-da96-4c7d-882d-067d1cb67198\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") "
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.854160 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfm6t\" (UniqueName: \"kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t\") pod \"83cdd1a8-da96-4c7d-882d-067d1cb67198\" (UID: \"83cdd1a8-da96-4c7d-882d-067d1cb67198\") "
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.856493 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities" (OuterVolumeSpecName: "utilities") pod "83cdd1a8-da96-4c7d-882d-067d1cb67198" (UID: "83cdd1a8-da96-4c7d-882d-067d1cb67198"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.860735 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t" (OuterVolumeSpecName: "kube-api-access-xfm6t") pod "83cdd1a8-da96-4c7d-882d-067d1cb67198" (UID: "83cdd1a8-da96-4c7d-882d-067d1cb67198"). InnerVolumeSpecName "kube-api-access-xfm6t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.956226 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:42 crc kubenswrapper[4953]: I1011 03:00:42.956302 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfm6t\" (UniqueName: \"kubernetes.io/projected/83cdd1a8-da96-4c7d-882d-067d1cb67198-kube-api-access-xfm6t\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.011015 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83cdd1a8-da96-4c7d-882d-067d1cb67198" (UID: "83cdd1a8-da96-4c7d-882d-067d1cb67198"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.059972 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83cdd1a8-da96-4c7d-882d-067d1cb67198-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.255131 4953 generic.go:334] "Generic (PLEG): container finished" podID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerID="b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8" exitCode=0
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.255214 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerDied","Data":"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"}
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.256814 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qvhk5" event={"ID":"83cdd1a8-da96-4c7d-882d-067d1cb67198","Type":"ContainerDied","Data":"6aa3ccc63e53239937f9d9399d8e8ee3f7076d0943c573e4cea90544a790bcd5"}
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.255333 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qvhk5"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.256872 4953 scope.go:117] "RemoveContainer" containerID="b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.257175 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9dntb" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="registry-server" containerID="cri-o://a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f" gracePeriod=2
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.285418 4953 scope.go:117] "RemoveContainer" containerID="61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.308351 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"]
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.312733 4953 scope.go:117] "RemoveContainer" containerID="be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.317665 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qvhk5"]
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.423131 4953 scope.go:117] "RemoveContainer" containerID="b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"
Oct 11 03:00:43 crc kubenswrapper[4953]: E1011 03:00:43.423656 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8\": container with ID starting with b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8 not found: ID does not exist" containerID="b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.423690 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8"} err="failed to get container status \"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8\": rpc error: code = NotFound desc = could not find container \"b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8\": container with ID starting with b8e55a8d2ac03761551d7f44759be76e2bb678ef5051398076d4c213bd9849a8 not found: ID does not exist"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.423713 4953 scope.go:117] "RemoveContainer" containerID="61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"
Oct 11 03:00:43 crc kubenswrapper[4953]: E1011 03:00:43.424006 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112\": container with ID starting with 61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112 not found: ID does not exist" containerID="61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.424027 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112"} err="failed to get container status \"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112\": rpc error: code = NotFound desc = could not find container \"61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112\": container with ID starting with 61b0dc9b849af25f8f454dd66a4a96db71119048e520b4d8b424330c7c3c5112 not found: ID does not exist"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.424041 4953 scope.go:117] "RemoveContainer" containerID="be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"
Oct 11 03:00:43 crc kubenswrapper[4953]: E1011 03:00:43.424578 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163\": container with ID starting with be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163 not found: ID does not exist" containerID="be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.424617 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163"} err="failed to get container status \"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163\": rpc error: code = NotFound desc = could not find container \"be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163\": container with ID starting with be9a360878294b6c274db6683b6a8fe91b75da98c8acd998284f42d39e77e163 not found: ID does not exist"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.470750 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-578874c84d-x526q"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.662039 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.767900 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flgg6\" (UniqueName: \"kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6\") pod \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") "
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.768018 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content\") pod \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") "
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.770678 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities\") pod \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\" (UID: \"1da46f32-93a2-4bcd-927b-71c7e63e33a3\") "
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.770993 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities" (OuterVolumeSpecName: "utilities") pod "1da46f32-93a2-4bcd-927b-71c7e63e33a3" (UID: "1da46f32-93a2-4bcd-927b-71c7e63e33a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.771171 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.773111 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6" (OuterVolumeSpecName: "kube-api-access-flgg6") pod "1da46f32-93a2-4bcd-927b-71c7e63e33a3" (UID: "1da46f32-93a2-4bcd-927b-71c7e63e33a3"). InnerVolumeSpecName "kube-api-access-flgg6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.811250 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" path="/var/lib/kubelet/pods/83cdd1a8-da96-4c7d-882d-067d1cb67198/volumes"
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.822325 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1da46f32-93a2-4bcd-927b-71c7e63e33a3" (UID: "1da46f32-93a2-4bcd-927b-71c7e63e33a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.872806 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flgg6\" (UniqueName: \"kubernetes.io/projected/1da46f32-93a2-4bcd-927b-71c7e63e33a3-kube-api-access-flgg6\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:43 crc kubenswrapper[4953]: I1011 03:00:43.872858 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1da46f32-93a2-4bcd-927b-71c7e63e33a3-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.266918 4953 generic.go:334] "Generic (PLEG): container finished" podID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerID="a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f" exitCode=0
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.267001 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9dntb"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.267006 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerDied","Data":"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"}
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.267115 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9dntb" event={"ID":"1da46f32-93a2-4bcd-927b-71c7e63e33a3","Type":"ContainerDied","Data":"e03c215a3e867b273ea8e08b25412579c14ac486a9a4ac18834c9e2157415120"}
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.267135 4953 scope.go:117] "RemoveContainer" containerID="a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.284664 4953 scope.go:117] "RemoveContainer" containerID="a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.298766 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9dntb"]
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.304463 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9dntb"]
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.319580 4953 scope.go:117] "RemoveContainer" containerID="77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.342539 4953 scope.go:117] "RemoveContainer" containerID="a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"
Oct 11 03:00:44 crc kubenswrapper[4953]: E1011 03:00:44.343014 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f\": container with ID starting with a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f not found: ID does not exist" containerID="a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.343046 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f"} err="failed to get container status \"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f\": rpc error: code = NotFound desc = could not find container \"a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f\": container with ID starting with a720e1baf743b50f70a46735aae9d2661bfb52eee25e4a049b434903d3ba716f not found: ID does not exist"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.343075 4953 scope.go:117] "RemoveContainer" containerID="a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb"
Oct 11 03:00:44 crc kubenswrapper[4953]: E1011 03:00:44.343319 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb\": container with ID starting with a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb not found: ID does not exist" containerID="a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.343347 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb"} err="failed to get container status \"a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb\": rpc error: code = NotFound desc = could not find container \"a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb\": container with ID starting with a71496c1ccf0812ff07eabecb849a2baef82b9f948f3cc61bd880d15173b1ceb not found: ID does not exist"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.343364 4953 scope.go:117] "RemoveContainer" containerID="77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047"
Oct 11 03:00:44 crc kubenswrapper[4953]: E1011 03:00:44.343590 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047\": container with ID starting with 77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047 not found: ID does not exist" containerID="77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047"
Oct 11 03:00:44 crc kubenswrapper[4953]: I1011 03:00:44.343627 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047"} err="failed to get container status \"77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047\": rpc error: code = NotFound desc = could not find container \"77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047\": container with ID starting with 77e399583299b384104fa7a74d55ee8fef0a04366cb752e75c3f6d6faf747047 not found: ID does not exist"
Oct 11 03:00:45 crc kubenswrapper[4953]: I1011 03:00:45.804543 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" path="/var/lib/kubelet/pods/1da46f32-93a2-4bcd-927b-71c7e63e33a3/volumes"
Oct 11 03:00:57 crc kubenswrapper[4953]: I1011 03:00:57.385342 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" event={"ID":"a319207e-2833-4dd6-b9db-60ce94fd41af","Type":"ContainerStarted","Data":"2efa4526a5e363b6ec66397a1f7d1a9af03d6ce2b2c9b6a5292ed5b88c1fe817"}
Oct 11 03:00:57 crc kubenswrapper[4953]: I1011 03:00:57.387452 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:00:57 crc kubenswrapper[4953]: I1011 03:00:57.423203 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8" podStartSLOduration=4.303579884 podStartE2EDuration="55.423173792s" podCreationTimestamp="2025-10-11 03:00:02 +0000 UTC" firstStartedPulling="2025-10-11 03:00:05.327874802 +0000 UTC m=+816.260962446" lastFinishedPulling="2025-10-11 03:00:56.44746871 +0000 UTC m=+867.380556354" observedRunningTime="2025-10-11 03:00:57.419385115 +0000 UTC m=+868.352472769" watchObservedRunningTime="2025-10-11 03:00:57.423173792 +0000 UTC m=+868.356261446"
Oct 11 03:01:03 crc kubenswrapper[4953]: I1011 03:01:03.682616 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-685c45897-wc8m8"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.146852 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"]
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147713 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147726 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147754 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="extract-utilities"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147775 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="extract-utilities"
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147792 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147799 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147810 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="extract-content"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147815 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="extract-content"
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147825 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="extract-content"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147831 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="extract-content"
Oct 11 03:01:20 crc kubenswrapper[4953]: E1011 03:01:20.147870 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="extract-utilities"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.147876 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="extract-utilities"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.148036 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1da46f32-93a2-4bcd-927b-71c7e63e33a3" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.148056 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="83cdd1a8-da96-4c7d-882d-067d1cb67198" containerName="registry-server"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.148938 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.150834 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.150882 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-9fgxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.151175 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.151310 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.161156 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"]
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.203837 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"]
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.204924 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.206933 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.217458 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"]
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.259897 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l79sf\" (UniqueName: \"kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.259938 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzlcm\" (UniqueName: \"kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.259974 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.260005 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.260024 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.360987 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l79sf\" (UniqueName: \"kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.361136 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzlcm\" (UniqueName: \"kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.361268 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.361379 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.361461 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.362160 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.362361 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.362377 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.380039 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzlcm\" (UniqueName: \"kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm\") pod \"dnsmasq-dns-675f4bcbfc-qm2f7\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.381774 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l79sf\" (UniqueName: \"kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf\") pod \"dnsmasq-dns-78dd6ddcc-28lxq\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.465972 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.523261 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq"
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.823903 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"]
Oct 11 03:01:20 crc kubenswrapper[4953]: W1011 03:01:20.825815 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf904fb28_cc00_49e6_aff9_3653cf5a2302.slice/crio-a0ef2bf777a6b78af1bc8e542b4308ff711b084b59a826133ed41b2fe3a2021a WatchSource:0}: Error finding container a0ef2bf777a6b78af1bc8e542b4308ff711b084b59a826133ed41b2fe3a2021a: Status 404 returned error can't find the container with id a0ef2bf777a6b78af1bc8e542b4308ff711b084b59a826133ed41b2fe3a2021a
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.828156 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 03:01:20 crc kubenswrapper[4953]: I1011 03:01:20.942485 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"]
Oct 11 03:01:20 crc kubenswrapper[4953]: W1011 03:01:20.944781 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode97447bd_3094_4fb6_94ff_0f5577357b4b.slice/crio-1738cffb64add89afbbfba8ca29fe6686f676a474540af999e5e907a49b26fe5 WatchSource:0}: Error finding container 1738cffb64add89afbbfba8ca29fe6686f676a474540af999e5e907a49b26fe5: Status 404 returned error can't find the container with id 1738cffb64add89afbbfba8ca29fe6686f676a474540af999e5e907a49b26fe5
Oct 11 03:01:21 crc kubenswrapper[4953]: I1011 03:01:21.617932 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq" event={"ID":"f904fb28-cc00-49e6-aff9-3653cf5a2302","Type":"ContainerStarted","Data":"a0ef2bf777a6b78af1bc8e542b4308ff711b084b59a826133ed41b2fe3a2021a"}
Oct 11 03:01:21 crc kubenswrapper[4953]: I1011 03:01:21.621391 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7" event={"ID":"e97447bd-3094-4fb6-94ff-0f5577357b4b","Type":"ContainerStarted","Data":"1738cffb64add89afbbfba8ca29fe6686f676a474540af999e5e907a49b26fe5"}
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.176109 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.203490 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.207589 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.225893 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.304361 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.304495 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.304531 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7lkk\" (UniqueName: \"kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.405523 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.405572 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7lkk\" (UniqueName: \"kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.405601 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.406530 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.407056 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.445567 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7lkk\" (UniqueName: \"kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk\") pod \"dnsmasq-dns-5ccc8479f9-jtzd5\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.464390 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.477212 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.479194 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.493694 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"]
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.506385 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.506436 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.506461 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jx5f\" (UniqueName: \"kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.549643 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.607498 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.607767 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.607802 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jx5f\" (UniqueName: \"kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.608534 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.608624 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.650403 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jx5f\" (UniqueName: \"kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f\") pod \"dnsmasq-dns-57d769cc4f-g5dmm\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:23 crc kubenswrapper[4953]: I1011 03:01:23.809830 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.064435 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"]
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.344171 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.347287 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.353262 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-b8hzt"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.354482 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.354794 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.355711 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"]
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.355817 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.355851 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.355940 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.357193 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.360332 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428405 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428453 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428474 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428503 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428532 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428545 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428563 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428684 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428735 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428761 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmd6b\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.428778 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.529908 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.529981 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530057 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmd6b\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530074 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530103 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530117 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530132 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530155 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530214 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530229 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530247 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.530643 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.531192 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.534866 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.535788 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.535783 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.537337 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.538466 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.539463 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.541195 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.549777 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.550576 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmd6b\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b\")
pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.568801 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.601920 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.603772 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.605829 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.610859 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-pr2k2" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.611198 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.612069 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.612204 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.612384 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.612682 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.619158 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.630884 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.630917 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.630958 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmtnr\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.630976 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.630993 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631007 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631036 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631065 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631084 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631104 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.631121 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.655744 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" event={"ID":"9712be94-0170-464b-806d-3257d0ec1f28","Type":"ContainerStarted","Data":"4684fd64e3b5cf7e6b75a92937250f1ea4515c9272be2997c447a4a0e4e64bc3"} Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.671052 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.731934 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmtnr\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.731980 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732002 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732025 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732049 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732077 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732098 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732155 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732177 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732238 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732259 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732456 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.732677 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.733314 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.733321 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.734280 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.734295 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.736183 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.736662 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.741879 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.751091 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmtnr\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.754401 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.756596 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " pod="openstack/rabbitmq-server-0" Oct 11 03:01:24 crc kubenswrapper[4953]: I1011 03:01:24.953555 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.274642 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.276861 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.282009 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-x8g92" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.282053 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.282073 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.282016 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.283132 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.290126 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.295375 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473295 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-generated\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473420 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-default\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473440 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473458 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-kolla-config\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473595 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-secrets\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473748 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473793 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zdfv\" (UniqueName: \"kubernetes.io/projected/179f5065-b743-428e-af8e-9e95fb0ea966-kube-api-access-2zdfv\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473886 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.473937 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-operator-scripts\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.575850 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-secrets\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.575909 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-galera-tls-certs\") pod 
\"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.575931 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zdfv\" (UniqueName: \"kubernetes.io/projected/179f5065-b743-428e-af8e-9e95fb0ea966-kube-api-access-2zdfv\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.575962 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.575984 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-operator-scripts\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576016 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-generated\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576059 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-default\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576077 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576092 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-kolla-config\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576529 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.576963 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-generated\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.580554 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-config-data-default\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.581038 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-kolla-config\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.584792 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/179f5065-b743-428e-af8e-9e95fb0ea966-operator-scripts\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.585758 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-secrets\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.596504 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zdfv\" (UniqueName: \"kubernetes.io/projected/179f5065-b743-428e-af8e-9e95fb0ea966-kube-api-access-2zdfv\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.602426 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.607192 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/179f5065-b743-428e-af8e-9e95fb0ea966-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.607358 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"179f5065-b743-428e-af8e-9e95fb0ea966\") " pod="openstack/openstack-galera-0" Oct 11 03:01:26 crc kubenswrapper[4953]: I1011 03:01:26.905393 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: W1011 03:01:27.244550 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16ceccb6_50bc_47e9_83c9_82117806aa76.slice/crio-b5a21a2d162472c2aa7355fe64b0aa90caed604a832a33e260364f0c64cfd892 WatchSource:0}: Error finding container b5a21a2d162472c2aa7355fe64b0aa90caed604a832a33e260364f0c64cfd892: Status 404 returned error can't find the container with id b5a21a2d162472c2aa7355fe64b0aa90caed604a832a33e260364f0c64cfd892 Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.616472 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.618835 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.624231 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.624828 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-q8crf" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.624988 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.625648 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.634032 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.682377 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" event={"ID":"16ceccb6-50bc-47e9-83c9-82117806aa76","Type":"ContainerStarted","Data":"b5a21a2d162472c2aa7355fe64b0aa90caed604a832a33e260364f0c64cfd892"} Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795185 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795234 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795283 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795324 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795349 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.795574 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f356e594-4357-488b-8b0a-a549d0a04531-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.796119 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.796162 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6ftp\" (UniqueName: \"kubernetes.io/projected/f356e594-4357-488b-8b0a-a549d0a04531-kube-api-access-q6ftp\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.796209 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.897692 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.897751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.897773 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.897791 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.898631 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.898662 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.898699 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f356e594-4357-488b-8b0a-a549d0a04531-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.898809 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.898915 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6ftp\" (UniqueName: \"kubernetes.io/projected/f356e594-4357-488b-8b0a-a549d0a04531-kube-api-access-q6ftp\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.899031 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.899300 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.899343 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.899556 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f356e594-4357-488b-8b0a-a549d0a04531-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.904456 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f356e594-4357-488b-8b0a-a549d0a04531-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.908136 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.908322 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.918137 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f356e594-4357-488b-8b0a-a549d0a04531-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.932190 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6ftp\" (UniqueName: \"kubernetes.io/projected/f356e594-4357-488b-8b0a-a549d0a04531-kube-api-access-q6ftp\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:27 crc kubenswrapper[4953]: I1011 03:01:27.957752 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f356e594-4357-488b-8b0a-a549d0a04531\") " pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.099706 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.100642 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.103308 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.103877 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-59dlb" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.104262 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.120009 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.203477 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bltzr\" (UniqueName: \"kubernetes.io/projected/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kube-api-access-bltzr\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.203523 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kolla-config\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.203540 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.203708 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.203801 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-config-data\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.253673 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.304704 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.304749 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-config-data\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.304850 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bltzr\" (UniqueName: \"kubernetes.io/projected/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kube-api-access-bltzr\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.304869 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kolla-config\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.304897 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.305917 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-config-data\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.306644 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kolla-config\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.309377 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.309408 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.323075 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bltzr\" (UniqueName: \"kubernetes.io/projected/cdc88039-fb87-4db1-a4fc-c808a8a7b70d-kube-api-access-bltzr\") pod \"memcached-0\" (UID: 
\"cdc88039-fb87-4db1-a4fc-c808a8a7b70d\") " pod="openstack/memcached-0" Oct 11 03:01:28 crc kubenswrapper[4953]: I1011 03:01:28.432081 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.637362 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.638689 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.641417 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-46bqh" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.647765 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.649026 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8qkv\" (UniqueName: \"kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv\") pod \"kube-state-metrics-0\" (UID: \"8c48e3d3-4846-4492-930a-110c3bf715b1\") " pod="openstack/kube-state-metrics-0" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.750648 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8qkv\" (UniqueName: \"kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv\") pod \"kube-state-metrics-0\" (UID: \"8c48e3d3-4846-4492-930a-110c3bf715b1\") " pod="openstack/kube-state-metrics-0" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.790206 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8qkv\" (UniqueName: \"kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv\") pod \"kube-state-metrics-0\" (UID: \"8c48e3d3-4846-4492-930a-110c3bf715b1\") " pod="openstack/kube-state-metrics-0" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.963571 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-46bqh" Oct 11 03:01:29 crc kubenswrapper[4953]: I1011 03:01:29.971970 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.093897 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2mxr7"] Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.095336 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.097961 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.098265 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.098427 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-qx54s" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.104180 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7"] Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.108898 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-w92fd"] Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.111196 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.196592 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-w92fd"] Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202740 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-ovn-controller-tls-certs\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202792 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202850 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-combined-ca-bundle\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202922 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-log-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202948 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845a96b-97b5-4417-be95-7a4760a84897-scripts\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.202976 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " 
pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.203006 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npbch\" (UniqueName: \"kubernetes.io/projected/e845a96b-97b5-4417-be95-7a4760a84897-kube-api-access-npbch\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304670 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-etc-ovs\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304722 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304739 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-run\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304781 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npbch\" (UniqueName: \"kubernetes.io/projected/e845a96b-97b5-4417-be95-7a4760a84897-kube-api-access-npbch\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304803 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-ovn-controller-tls-certs\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304847 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae4146e-934f-4986-ba64-20add72d9c12-scripts\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304882 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-combined-ca-bundle\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304902 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-lib\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.304923 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-log\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.305157 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr6mq\" (UniqueName: \"kubernetes.io/projected/bae4146e-934f-4986-ba64-20add72d9c12-kube-api-access-cr6mq\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.305191 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-log-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.305213 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845a96b-97b5-4417-be95-7a4760a84897-scripts\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.306142 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.306263 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-run-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.307714 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e845a96b-97b5-4417-be95-7a4760a84897-scripts\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.309978 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e845a96b-97b5-4417-be95-7a4760a84897-var-log-ovn\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.319664 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-combined-ca-bundle\") pod \"ovn-controller-2mxr7\" (UID: 
\"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.325206 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e845a96b-97b5-4417-be95-7a4760a84897-ovn-controller-tls-certs\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.330262 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npbch\" (UniqueName: \"kubernetes.io/projected/e845a96b-97b5-4417-be95-7a4760a84897-kube-api-access-npbch\") pod \"ovn-controller-2mxr7\" (UID: \"e845a96b-97b5-4417-be95-7a4760a84897\") " pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407416 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae4146e-934f-4986-ba64-20add72d9c12-scripts\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407520 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-lib\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407568 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-log\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407652 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr6mq\" (UniqueName: \"kubernetes.io/projected/bae4146e-934f-4986-ba64-20add72d9c12-kube-api-access-cr6mq\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407723 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-etc-ovs\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407754 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-run\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.407945 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-run\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.408277 4953 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-lib\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.408439 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-var-log\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.409076 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/bae4146e-934f-4986-ba64-20add72d9c12-etc-ovs\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.412242 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bae4146e-934f-4986-ba64-20add72d9c12-scripts\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.432535 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr6mq\" (UniqueName: \"kubernetes.io/projected/bae4146e-934f-4986-ba64-20add72d9c12-kube-api-access-cr6mq\") pod \"ovn-controller-ovs-w92fd\" (UID: \"bae4146e-934f-4986-ba64-20add72d9c12\") " pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.476250 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:33 crc kubenswrapper[4953]: I1011 03:01:33.489323 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.087965 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.089989 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.092221 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.092364 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-sr9xz" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.117273 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.117366 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.117385 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.124411 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.173409 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.173579 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l79sf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-28lxq_openstack(f904fb28-cc00-49e6-aff9-3653cf5a2302): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.174827 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq" podUID="f904fb28-cc00-49e6-aff9-3653cf5a2302" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.212757 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.213468 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mzlcm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-qm2f7_openstack(e97447bd-3094-4fb6-94ff-0f5577357b4b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 03:01:37 crc kubenswrapper[4953]: E1011 03:01:37.214693 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7" podUID="e97447bd-3094-4fb6-94ff-0f5577357b4b" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272508 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-config\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272577 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272626 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272651 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " 
pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272677 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272769 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272802 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcplg\" (UniqueName: \"kubernetes.io/projected/127eaeb9-abc1-44ee-90b7-07bbf0a85837-kube-api-access-lcplg\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.272835 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.294587 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.296252 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.301958 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.302182 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-ffxpx" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.302803 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.303840 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.307767 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377137 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377191 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcplg\" (UniqueName: \"kubernetes.io/projected/127eaeb9-abc1-44ee-90b7-07bbf0a85837-kube-api-access-lcplg\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377234 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377280 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-config\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377321 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377343 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.377363 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.390484 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.403187 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.405214 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-config\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.410077 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/127eaeb9-abc1-44ee-90b7-07bbf0a85837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.422301 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.429218 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.430625 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.435007 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/127eaeb9-abc1-44ee-90b7-07bbf0a85837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.459903 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcplg\" (UniqueName: \"kubernetes.io/projected/127eaeb9-abc1-44ee-90b7-07bbf0a85837-kube-api-access-lcplg\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.490297 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"127eaeb9-abc1-44ee-90b7-07bbf0a85837\") " pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc 
kubenswrapper[4953]: I1011 03:01:37.512143 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512243 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512271 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-config\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512295 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrh92\" (UniqueName: \"kubernetes.io/projected/ccfff6f8-4954-46d7-ba26-c317b321a169-kube-api-access-qrh92\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512536 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512555 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512580 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.512683 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.617092 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.617147 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-config\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.617181 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrh92\" (UniqueName: \"kubernetes.io/projected/ccfff6f8-4954-46d7-ba26-c317b321a169-kube-api-access-qrh92\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.619239 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-config\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.620397 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.620451 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.620495 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.620574 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.620650 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.621407 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.622170 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ccfff6f8-4954-46d7-ba26-c317b321a169-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.623122 4953 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.627724 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.628971 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.629952 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccfff6f8-4954-46d7-ba26-c317b321a169-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.650164 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrh92\" (UniqueName: \"kubernetes.io/projected/ccfff6f8-4954-46d7-ba26-c317b321a169-kube-api-access-qrh92\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.657731 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ccfff6f8-4954-46d7-ba26-c317b321a169\") " pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.733235 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.767115 4953 generic.go:334] "Generic (PLEG): container finished" podID="9712be94-0170-464b-806d-3257d0ec1f28" containerID="85674dcd0b5283cc9b1d1028fe0f98f63fc2733b0f81ec7f00f9494acc6a0203" exitCode=0 Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.767183 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" event={"ID":"9712be94-0170-464b-806d-3257d0ec1f28","Type":"ContainerDied","Data":"85674dcd0b5283cc9b1d1028fe0f98f63fc2733b0f81ec7f00f9494acc6a0203"} Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.770410 4953 generic.go:334] "Generic (PLEG): container finished" podID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerID="36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd" exitCode=0 Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.770719 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" event={"ID":"16ceccb6-50bc-47e9-83c9-82117806aa76","Type":"ContainerDied","Data":"36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd"} Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.771600 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.849293 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.872752 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: W1011 03:01:37.879170 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod179f5065_b743_428e_af8e_9e95fb0ea966.slice/crio-64a9862aa74d19490bd2c4e7ae4975acfa3c43db3427ad426109040ba82ba324 WatchSource:0}: Error finding container 64a9862aa74d19490bd2c4e7ae4975acfa3c43db3427ad426109040ba82ba324: Status 404 returned error can't find the container with id 64a9862aa74d19490bd2c4e7ae4975acfa3c43db3427ad426109040ba82ba324 Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.934386 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.947178 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: I1011 03:01:37.953623 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:01:37 crc kubenswrapper[4953]: W1011 03:01:37.970516 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c48e3d3_4846_4492_930a_110c3bf715b1.slice/crio-ea02eb5a6f81e0bcd9732b97ed669796c546b42cbf00969963bcc3de4acd0114 WatchSource:0}: Error finding container ea02eb5a6f81e0bcd9732b97ed669796c546b42cbf00969963bcc3de4acd0114: Status 404 returned error can't find the container with id ea02eb5a6f81e0bcd9732b97ed669796c546b42cbf00969963bcc3de4acd0114 Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.326594 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7"] Oct 11 03:01:38 crc kubenswrapper[4953]: W1011 03:01:38.332015 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode845a96b_97b5_4417_be95_7a4760a84897.slice/crio-ad99286513eab3b7b2bcc7edffcf66a8c6d9d99c1ea841068832d0c94905284d WatchSource:0}: Error finding container ad99286513eab3b7b2bcc7edffcf66a8c6d9d99c1ea841068832d0c94905284d: Status 404 returned error can't find the container with id ad99286513eab3b7b2bcc7edffcf66a8c6d9d99c1ea841068832d0c94905284d Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.332052 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.366342 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.370448 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.441568 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config\") pod \"e97447bd-3094-4fb6-94ff-0f5577357b4b\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.441629 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzlcm\" (UniqueName: \"kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm\") pod \"e97447bd-3094-4fb6-94ff-0f5577357b4b\" (UID: \"e97447bd-3094-4fb6-94ff-0f5577357b4b\") " Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.443224 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config" (OuterVolumeSpecName: "config") pod "e97447bd-3094-4fb6-94ff-0f5577357b4b" (UID: "e97447bd-3094-4fb6-94ff-0f5577357b4b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.449733 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm" (OuterVolumeSpecName: "kube-api-access-mzlcm") pod "e97447bd-3094-4fb6-94ff-0f5577357b4b" (UID: "e97447bd-3094-4fb6-94ff-0f5577357b4b"). InnerVolumeSpecName "kube-api-access-mzlcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.543118 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config\") pod \"f904fb28-cc00-49e6-aff9-3653cf5a2302\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.543303 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc\") pod \"f904fb28-cc00-49e6-aff9-3653cf5a2302\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.543349 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l79sf\" (UniqueName: \"kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf\") pod \"f904fb28-cc00-49e6-aff9-3653cf5a2302\" (UID: \"f904fb28-cc00-49e6-aff9-3653cf5a2302\") " Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.543692 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e97447bd-3094-4fb6-94ff-0f5577357b4b-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.543723 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzlcm\" (UniqueName: \"kubernetes.io/projected/e97447bd-3094-4fb6-94ff-0f5577357b4b-kube-api-access-mzlcm\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.544365 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config" (OuterVolumeSpecName: "config") pod "f904fb28-cc00-49e6-aff9-3653cf5a2302" (UID: 
"f904fb28-cc00-49e6-aff9-3653cf5a2302"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.544722 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f904fb28-cc00-49e6-aff9-3653cf5a2302" (UID: "f904fb28-cc00-49e6-aff9-3653cf5a2302"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.551800 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf" (OuterVolumeSpecName: "kube-api-access-l79sf") pod "f904fb28-cc00-49e6-aff9-3653cf5a2302" (UID: "f904fb28-cc00-49e6-aff9-3653cf5a2302"). InnerVolumeSpecName "kube-api-access-l79sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.575277 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-w92fd"] Oct 11 03:01:38 crc kubenswrapper[4953]: W1011 03:01:38.593737 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbae4146e_934f_4986_ba64_20add72d9c12.slice/crio-b6b024e464addebcc1ce15f3d422853899ec54b6e5f397d23b3ec4132a003ac6 WatchSource:0}: Error finding container b6b024e464addebcc1ce15f3d422853899ec54b6e5f397d23b3ec4132a003ac6: Status 404 returned error can't find the container with id b6b024e464addebcc1ce15f3d422853899ec54b6e5f397d23b3ec4132a003ac6 Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.623616 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.645237 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.645276 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l79sf\" (UniqueName: \"kubernetes.io/projected/f904fb28-cc00-49e6-aff9-3653cf5a2302-kube-api-access-l79sf\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.645291 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f904fb28-cc00-49e6-aff9-3653cf5a2302-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:38 crc kubenswrapper[4953]: W1011 03:01:38.645537 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccfff6f8_4954_46d7_ba26_c317b321a169.slice/crio-e3a72f9aa1d758eb09c0efb5f1a655025c7a2aa19ffe4bbcf92eb0a9ed96360c WatchSource:0}: Error finding container e3a72f9aa1d758eb09c0efb5f1a655025c7a2aa19ffe4bbcf92eb0a9ed96360c: Status 404 returned error can't find the container with id e3a72f9aa1d758eb09c0efb5f1a655025c7a2aa19ffe4bbcf92eb0a9ed96360c Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.779101 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"cdc88039-fb87-4db1-a4fc-c808a8a7b70d","Type":"ContainerStarted","Data":"b1e1c63a4f35e71378bacf26b4ea90a8f15778fdd6cda97ea9ae00a17d496320"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.780719 
4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8c48e3d3-4846-4492-930a-110c3bf715b1","Type":"ContainerStarted","Data":"ea02eb5a6f81e0bcd9732b97ed669796c546b42cbf00969963bcc3de4acd0114"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.782219 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-w92fd" event={"ID":"bae4146e-934f-4986-ba64-20add72d9c12","Type":"ContainerStarted","Data":"b6b024e464addebcc1ce15f3d422853899ec54b6e5f397d23b3ec4132a003ac6"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.784742 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" event={"ID":"9712be94-0170-464b-806d-3257d0ec1f28","Type":"ContainerStarted","Data":"9f6158c6e84383696d868b13539173bf6d76ea7cc6cf63d53cd3c1f8ad2a05fe"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.784897 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.786686 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"179f5065-b743-428e-af8e-9e95fb0ea966","Type":"ContainerStarted","Data":"64a9862aa74d19490bd2c4e7ae4975acfa3c43db3427ad426109040ba82ba324"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.788841 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.788840 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-28lxq" event={"ID":"f904fb28-cc00-49e6-aff9-3653cf5a2302","Type":"ContainerDied","Data":"a0ef2bf777a6b78af1bc8e542b4308ff711b084b59a826133ed41b2fe3a2021a"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.794364 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" event={"ID":"16ceccb6-50bc-47e9-83c9-82117806aa76","Type":"ContainerStarted","Data":"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.795332 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.797079 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ccfff6f8-4954-46d7-ba26-c317b321a169","Type":"ContainerStarted","Data":"e3a72f9aa1d758eb09c0efb5f1a655025c7a2aa19ffe4bbcf92eb0a9ed96360c"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.798452 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7" event={"ID":"e97447bd-3094-4fb6-94ff-0f5577357b4b","Type":"ContainerDied","Data":"1738cffb64add89afbbfba8ca29fe6686f676a474540af999e5e907a49b26fe5"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.798561 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-qm2f7" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.808872 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" podStartSLOduration=2.52338574 podStartE2EDuration="15.808853396s" podCreationTimestamp="2025-10-11 03:01:23 +0000 UTC" firstStartedPulling="2025-10-11 03:01:24.071952614 +0000 UTC m=+895.005040258" lastFinishedPulling="2025-10-11 03:01:37.35742028 +0000 UTC m=+908.290507914" observedRunningTime="2025-10-11 03:01:38.808141338 +0000 UTC m=+909.741228972" watchObservedRunningTime="2025-10-11 03:01:38.808853396 +0000 UTC m=+909.741941040" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.818743 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerStarted","Data":"ed9fb61ba2a6958c9b398cf92f5ba4db625ae17424f2b387102ed9f0be6f16aa"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.819874 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f356e594-4357-488b-8b0a-a549d0a04531","Type":"ContainerStarted","Data":"7367f8fd9b7a5f32dc52c650ab652ca7c2517d67563d35b4b1904f1ed0d6ca1c"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.820548 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7" event={"ID":"e845a96b-97b5-4417-be95-7a4760a84897","Type":"ContainerStarted","Data":"ad99286513eab3b7b2bcc7edffcf66a8c6d9d99c1ea841068832d0c94905284d"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.821956 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerStarted","Data":"d2647cbbfda343516adaf3426ee1bd6c4bfb038a780f41c19fdb50b8bdd67511"} Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.828924 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" podStartSLOduration=5.7310744190000005 podStartE2EDuration="15.828911558s" podCreationTimestamp="2025-10-11 03:01:23 +0000 UTC" firstStartedPulling="2025-10-11 03:01:27.253433615 +0000 UTC m=+898.186521269" lastFinishedPulling="2025-10-11 03:01:37.351270764 +0000 UTC m=+908.284358408" observedRunningTime="2025-10-11 03:01:38.827387749 +0000 UTC m=+909.760475393" watchObservedRunningTime="2025-10-11 03:01:38.828911558 +0000 UTC m=+909.761999202" Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.869573 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"] Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.876073 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-28lxq"] Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.896478 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"] Oct 11 03:01:38 crc kubenswrapper[4953]: I1011 03:01:38.911128 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-qm2f7"] Oct 11 03:01:39 crc kubenswrapper[4953]: I1011 03:01:39.149557 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 11 03:01:39 crc kubenswrapper[4953]: I1011 03:01:39.807541 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e97447bd-3094-4fb6-94ff-0f5577357b4b" 
path="/var/lib/kubelet/pods/e97447bd-3094-4fb6-94ff-0f5577357b4b/volumes" Oct 11 03:01:39 crc kubenswrapper[4953]: I1011 03:01:39.808056 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f904fb28-cc00-49e6-aff9-3653cf5a2302" path="/var/lib/kubelet/pods/f904fb28-cc00-49e6-aff9-3653cf5a2302/volumes" Oct 11 03:01:39 crc kubenswrapper[4953]: I1011 03:01:39.834788 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"127eaeb9-abc1-44ee-90b7-07bbf0a85837","Type":"ContainerStarted","Data":"02f89fb09ca6fc0447fe244327ee366935eac5b317e0a7719bd8a64c8f51f7d3"} Oct 11 03:01:41 crc kubenswrapper[4953]: I1011 03:01:41.317313 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:01:41 crc kubenswrapper[4953]: I1011 03:01:41.317707 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:01:43 crc kubenswrapper[4953]: I1011 03:01:43.551929 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" Oct 11 03:01:43 crc kubenswrapper[4953]: I1011 03:01:43.810747 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" Oct 11 03:01:43 crc kubenswrapper[4953]: I1011 03:01:43.877283 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"] Oct 11 03:01:43 crc kubenswrapper[4953]: I1011 03:01:43.895786 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="dnsmasq-dns" containerID="cri-o://9f6158c6e84383696d868b13539173bf6d76ea7cc6cf63d53cd3c1f8ad2a05fe" gracePeriod=10 Oct 11 03:01:44 crc kubenswrapper[4953]: I1011 03:01:44.911558 4953 generic.go:334] "Generic (PLEG): container finished" podID="9712be94-0170-464b-806d-3257d0ec1f28" containerID="9f6158c6e84383696d868b13539173bf6d76ea7cc6cf63d53cd3c1f8ad2a05fe" exitCode=0 Oct 11 03:01:44 crc kubenswrapper[4953]: I1011 03:01:44.911689 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" event={"ID":"9712be94-0170-464b-806d-3257d0ec1f28","Type":"ContainerDied","Data":"9f6158c6e84383696d868b13539173bf6d76ea7cc6cf63d53cd3c1f8ad2a05fe"} Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.679953 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.792192 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7lkk\" (UniqueName: \"kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk\") pod \"9712be94-0170-464b-806d-3257d0ec1f28\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.792378 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc\") pod \"9712be94-0170-464b-806d-3257d0ec1f28\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.792410 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config\") pod \"9712be94-0170-464b-806d-3257d0ec1f28\" (UID: \"9712be94-0170-464b-806d-3257d0ec1f28\") " Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.796042 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk" (OuterVolumeSpecName: "kube-api-access-m7lkk") pod "9712be94-0170-464b-806d-3257d0ec1f28" (UID: "9712be94-0170-464b-806d-3257d0ec1f28"). InnerVolumeSpecName "kube-api-access-m7lkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.828142 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9712be94-0170-464b-806d-3257d0ec1f28" (UID: "9712be94-0170-464b-806d-3257d0ec1f28"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.832855 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config" (OuterVolumeSpecName: "config") pod "9712be94-0170-464b-806d-3257d0ec1f28" (UID: "9712be94-0170-464b-806d-3257d0ec1f28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.894856 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.894897 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9712be94-0170-464b-806d-3257d0ec1f28-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.894910 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7lkk\" (UniqueName: \"kubernetes.io/projected/9712be94-0170-464b-806d-3257d0ec1f28-kube-api-access-m7lkk\") on node \"crc\" DevicePath \"\"" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.926595 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" event={"ID":"9712be94-0170-464b-806d-3257d0ec1f28","Type":"ContainerDied","Data":"4684fd64e3b5cf7e6b75a92937250f1ea4515c9272be2997c447a4a0e4e64bc3"} Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.926666 4953 scope.go:117] "RemoveContainer" containerID="9f6158c6e84383696d868b13539173bf6d76ea7cc6cf63d53cd3c1f8ad2a05fe" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.926682 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jtzd5" Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.957358 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"] Oct 11 03:01:46 crc kubenswrapper[4953]: I1011 03:01:46.964766 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jtzd5"] Oct 11 03:01:47 crc kubenswrapper[4953]: I1011 03:01:47.012296 4953 scope.go:117] "RemoveContainer" containerID="85674dcd0b5283cc9b1d1028fe0f98f63fc2733b0f81ec7f00f9494acc6a0203" Oct 11 03:01:47 crc kubenswrapper[4953]: I1011 03:01:47.806406 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9712be94-0170-464b-806d-3257d0ec1f28" path="/var/lib/kubelet/pods/9712be94-0170-464b-806d-3257d0ec1f28/volumes" Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 03:01:48.943875 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"179f5065-b743-428e-af8e-9e95fb0ea966","Type":"ContainerStarted","Data":"1ec00d701e0cac6861be9f27897a953c855cf711515029c444304733234d97c7"} Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 03:01:48.947761 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ccfff6f8-4954-46d7-ba26-c317b321a169","Type":"ContainerStarted","Data":"16733547f8f691d8b08b6e345e6162d45a44e69c59ef8df58379f4eb9a167b46"} Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 03:01:48.951135 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"127eaeb9-abc1-44ee-90b7-07bbf0a85837","Type":"ContainerStarted","Data":"ff9150ca888d7671388ae9389d83464b5d76dbf58c2877618fdecac444a2e1ee"} Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 03:01:48.953186 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f356e594-4357-488b-8b0a-a549d0a04531","Type":"ContainerStarted","Data":"df7238452fe85cf4799398eba36fc407b6489d3823e960f84fc49fdfb7705e53"} Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 
Oct 11 03:01:48 crc kubenswrapper[4953]: I1011 03:01:48.956132 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.024908 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.162589685 podStartE2EDuration="21.024882663s" podCreationTimestamp="2025-10-11 03:01:28 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.965522101 +0000 UTC m=+908.898609745" lastFinishedPulling="2025-10-11 03:01:46.827815079 +0000 UTC m=+917.760902723" observedRunningTime="2025-10-11 03:01:49.014754955 +0000 UTC m=+919.947842599" watchObservedRunningTime="2025-10-11 03:01:49.024882663 +0000 UTC m=+919.957970307"
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.971079 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerStarted","Data":"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734"}
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.972844 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerStarted","Data":"d6d046c528c7f57df75d9528dedfb6f98dc3d824395bda9180e62ca01e3c4d2d"}
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.975039 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8c48e3d3-4846-4492-930a-110c3bf715b1","Type":"ContainerStarted","Data":"2c9e75b715e5b7ad42c5154efff608a719f20ff46c9626e2ee65b8fcc742c7e0"}
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.975125 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.976572 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-w92fd" event={"ID":"bae4146e-934f-4986-ba64-20add72d9c12","Type":"ContainerStarted","Data":"1da5af1c06814aea5db11040868f6f6659c08b209ab92c0f5a8647a1a58eacb4"}
Oct 11 03:01:49 crc kubenswrapper[4953]: I1011 03:01:49.978439 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7" event={"ID":"e845a96b-97b5-4417-be95-7a4760a84897","Type":"ContainerStarted","Data":"50725f193058968b3867284d5ec312a428ed43fb214bd4b628856cc36226563a"}
Oct 11 03:01:50 crc kubenswrapper[4953]: I1011 03:01:50.034139 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-2mxr7" podStartSLOduration=8.38288675 podStartE2EDuration="17.034120641s" podCreationTimestamp="2025-10-11 03:01:33 +0000 UTC" firstStartedPulling="2025-10-11 03:01:38.351730299 +0000 UTC m=+909.284817943" lastFinishedPulling="2025-10-11 03:01:47.0029642 +0000 UTC m=+917.936051834" observedRunningTime="2025-10-11 03:01:50.026439165 +0000 UTC m=+920.959526849" watchObservedRunningTime="2025-10-11 03:01:50.034120641 +0000 UTC m=+920.967208285"
Oct 11 03:01:50 crc kubenswrapper[4953]: I1011 03:01:50.080755 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.515368783 podStartE2EDuration="21.080733961s" podCreationTimestamp="2025-10-11 03:01:29 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.985184263 +0000 UTC m=+908.918271907" lastFinishedPulling="2025-10-11 03:01:47.550549441 +0000 UTC m=+918.483637085" observedRunningTime="2025-10-11 03:01:50.070851669 +0000 UTC m=+921.003939323" watchObservedRunningTime="2025-10-11 03:01:50.080733961 +0000 UTC m=+921.013821615"
podStartE2EDuration="21.080733961s" podCreationTimestamp="2025-10-11 03:01:29 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.985184263 +0000 UTC m=+908.918271907" lastFinishedPulling="2025-10-11 03:01:47.550549441 +0000 UTC m=+918.483637085" observedRunningTime="2025-10-11 03:01:50.070851669 +0000 UTC m=+921.003939323" watchObservedRunningTime="2025-10-11 03:01:50.080733961 +0000 UTC m=+921.013821615" Oct 11 03:01:50 crc kubenswrapper[4953]: I1011 03:01:50.989274 4953 generic.go:334] "Generic (PLEG): container finished" podID="bae4146e-934f-4986-ba64-20add72d9c12" containerID="1da5af1c06814aea5db11040868f6f6659c08b209ab92c0f5a8647a1a58eacb4" exitCode=0 Oct 11 03:01:50 crc kubenswrapper[4953]: I1011 03:01:50.989395 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-w92fd" event={"ID":"bae4146e-934f-4986-ba64-20add72d9c12","Type":"ContainerDied","Data":"1da5af1c06814aea5db11040868f6f6659c08b209ab92c0f5a8647a1a58eacb4"} Oct 11 03:01:50 crc kubenswrapper[4953]: I1011 03:01:50.990160 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-2mxr7" Oct 11 03:01:53 crc kubenswrapper[4953]: I1011 03:01:53.432813 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 11 03:01:56 crc kubenswrapper[4953]: I1011 03:01:56.041085 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-w92fd" event={"ID":"bae4146e-934f-4986-ba64-20add72d9c12","Type":"ContainerStarted","Data":"ca5f13a50e7015d89ffa4ff06c8edcae41657a265fd039dd7626a0efecf6a274"} Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.060504 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ccfff6f8-4954-46d7-ba26-c317b321a169","Type":"ContainerStarted","Data":"bc82241ed82cab6d3869025184c4d5b29f324418882d567ad7902ccab1997027"} Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.063129 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"127eaeb9-abc1-44ee-90b7-07bbf0a85837","Type":"ContainerStarted","Data":"43e045e1747f1fb6ac3495bb31c32dffd4af1734a3ddedcf4ebbe81f144db6b8"} Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.065229 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-w92fd" event={"ID":"bae4146e-934f-4986-ba64-20add72d9c12","Type":"ContainerStarted","Data":"75af927baae2fdf1fe265ec30338f183ca532e7048ea623998b1eb8598000e16"} Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.065435 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.065481 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-w92fd" Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.082261 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.664056562 podStartE2EDuration="22.08224592s" podCreationTimestamp="2025-10-11 03:01:36 +0000 UTC" firstStartedPulling="2025-10-11 03:01:38.652714471 +0000 UTC m=+909.585802115" lastFinishedPulling="2025-10-11 03:01:57.070903829 +0000 UTC m=+928.003991473" observedRunningTime="2025-10-11 03:01:58.081735837 +0000 UTC m=+929.014823491" watchObservedRunningTime="2025-10-11 03:01:58.08224592 +0000 UTC m=+929.015333564" Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.103350 
Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.127751 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-w92fd" podStartSLOduration=16.750826151 podStartE2EDuration="25.127724391s" podCreationTimestamp="2025-10-11 03:01:33 +0000 UTC" firstStartedPulling="2025-10-11 03:01:38.623786192 +0000 UTC m=+909.556873836" lastFinishedPulling="2025-10-11 03:01:47.000684432 +0000 UTC m=+917.933772076" observedRunningTime="2025-10-11 03:01:58.120585638 +0000 UTC m=+929.053673282" watchObservedRunningTime="2025-10-11 03:01:58.127724391 +0000 UTC m=+929.060812035"
Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.733837 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.772054 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.871375 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 11 03:01:58 crc kubenswrapper[4953]: I1011 03:01:58.873326 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.074167 4953 generic.go:334] "Generic (PLEG): container finished" podID="179f5065-b743-428e-af8e-9e95fb0ea966" containerID="1ec00d701e0cac6861be9f27897a953c855cf711515029c444304733234d97c7" exitCode=0
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.074239 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"179f5065-b743-428e-af8e-9e95fb0ea966","Type":"ContainerDied","Data":"1ec00d701e0cac6861be9f27897a953c855cf711515029c444304733234d97c7"}
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.075724 4953 generic.go:334] "Generic (PLEG): container finished" podID="f356e594-4357-488b-8b0a-a549d0a04531" containerID="df7238452fe85cf4799398eba36fc407b6489d3823e960f84fc49fdfb7705e53" exitCode=0
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.076222 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f356e594-4357-488b-8b0a-a549d0a04531","Type":"ContainerDied","Data":"df7238452fe85cf4799398eba36fc407b6489d3823e960f84fc49fdfb7705e53"}
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.076447 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.076655 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.128815 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.154795 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.448909 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-bw8zz"] Oct 11 03:01:59 crc kubenswrapper[4953]: E1011 03:01:59.449809 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="init" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.449831 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="init" Oct 11 03:01:59 crc kubenswrapper[4953]: E1011 03:01:59.449848 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="dnsmasq-dns" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.449856 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="dnsmasq-dns" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.450089 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9712be94-0170-464b-806d-3257d0ec1f28" containerName="dnsmasq-dns" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.451129 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.453338 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.456197 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-bw8zz"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.561440 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-mbwl2"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.562875 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.571977 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-mbwl2"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.572488 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.629783 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bdfs\" (UniqueName: \"kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.629877 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.629960 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.630005 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.651330 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.661898 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.667077 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.667131 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.667082 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.667326 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-ghcl8" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.671547 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.681827 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-bw8zz"] Oct 11 03:01:59 crc kubenswrapper[4953]: E1011 03:01:59.682941 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-4bdfs ovsdbserver-nb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" podUID="b830b7a0-7fc3-400d-aaf3-614818b3e833" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.719442 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.721047 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.725042 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.726783 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.733057 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9284d870-067e-4af0-98b7-b57e976c7a91-config\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.733140 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.733187 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovn-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.733262 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: 
\"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.733292 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd5fp\" (UniqueName: \"kubernetes.io/projected/9284d870-067e-4af0-98b7-b57e976c7a91-kube-api-access-dd5fp\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.734372 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bdfs\" (UniqueName: \"kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.734439 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-combined-ca-bundle\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.734477 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovs-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.734498 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.734539 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.735387 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.735477 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.739219 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb\") pod 
\"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.763928 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bdfs\" (UniqueName: \"kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs\") pod \"dnsmasq-dns-7fd796d7df-bw8zz\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.835775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-config\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836249 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836288 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9284d870-067e-4af0-98b7-b57e976c7a91-config\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836333 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovn-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836434 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndgnb\" (UniqueName: \"kubernetes.io/projected/5d8bc190-9bc5-4b0c-8592-6b31362c4783-kube-api-access-ndgnb\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836521 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd5fp\" (UniqueName: \"kubernetes.io/projected/9284d870-067e-4af0-98b7-b57e976c7a91-kube-api-access-dd5fp\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836562 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836615 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " 
pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836643 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovn-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836883 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-combined-ca-bundle\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836916 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836937 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836966 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.836986 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovs-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837068 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9284d870-067e-4af0-98b7-b57e976c7a91-config\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837144 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9284d870-067e-4af0-98b7-b57e976c7a91-ovs-rundir\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837292 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn6sk\" (UniqueName: \"kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837315 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-scripts\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.837334 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.840526 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-combined-ca-bundle\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.840552 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9284d870-067e-4af0-98b7-b57e976c7a91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.853465 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd5fp\" (UniqueName: \"kubernetes.io/projected/9284d870-067e-4af0-98b7-b57e976c7a91-kube-api-access-dd5fp\") pod \"ovn-controller-metrics-mbwl2\" (UID: \"9284d870-067e-4af0-98b7-b57e976c7a91\") " pod="openstack/ovn-controller-metrics-mbwl2"
Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.905560 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-mbwl2"
Need to start a new one" pod="openstack/ovn-controller-metrics-mbwl2" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.939274 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.939581 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-config\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.939914 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940190 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndgnb\" (UniqueName: \"kubernetes.io/projected/5d8bc190-9bc5-4b0c-8592-6b31362c4783-kube-api-access-ndgnb\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940426 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940680 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940925 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-config\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940833 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.941179 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.940715 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" 
(UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.941191 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.941860 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.942102 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.942422 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.942942 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn6sk\" (UniqueName: \"kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.942594 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.943980 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.944756 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-scripts\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.945054 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.945065 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.945469 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5d8bc190-9bc5-4b0c-8592-6b31362c4783-scripts\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.949735 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d8bc190-9bc5-4b0c-8592-6b31362c4783-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.963302 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndgnb\" (UniqueName: \"kubernetes.io/projected/5d8bc190-9bc5-4b0c-8592-6b31362c4783-kube-api-access-ndgnb\") pod \"ovn-northd-0\" (UID: \"5d8bc190-9bc5-4b0c-8592-6b31362c4783\") " pod="openstack/ovn-northd-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.965638 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn6sk\" (UniqueName: \"kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk\") pod \"dnsmasq-dns-86db49b7ff-z6xn5\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.978960 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 11 03:01:59 crc kubenswrapper[4953]: I1011 03:01:59.985095 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.051678 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.091781 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"179f5065-b743-428e-af8e-9e95fb0ea966","Type":"ContainerStarted","Data":"948183516c1cc440927b21a0b26b7dde11f539bd348831022c2afa5b1de7c078"} Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.094546 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f356e594-4357-488b-8b0a-a549d0a04531","Type":"ContainerStarted","Data":"4addf5404be5bb2264f7df13c3c40ad8d67f3ace7bf4522b54038a34e11443d0"} Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.094752 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.118827 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.010442578 podStartE2EDuration="35.118812147s" podCreationTimestamp="2025-10-11 03:01:25 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.892090427 +0000 UTC m=+908.825178071" lastFinishedPulling="2025-10-11 03:01:47.000459986 +0000 UTC m=+917.933547640" observedRunningTime="2025-10-11 03:02:00.114484816 +0000 UTC m=+931.047572460" watchObservedRunningTime="2025-10-11 03:02:00.118812147 +0000 UTC m=+931.051899791" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.167227 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.254683 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc\") pod \"b830b7a0-7fc3-400d-aaf3-614818b3e833\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.255059 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb\") pod \"b830b7a0-7fc3-400d-aaf3-614818b3e833\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.255212 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config\") pod \"b830b7a0-7fc3-400d-aaf3-614818b3e833\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.255354 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bdfs\" (UniqueName: \"kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs\") pod \"b830b7a0-7fc3-400d-aaf3-614818b3e833\" (UID: \"b830b7a0-7fc3-400d-aaf3-614818b3e833\") " Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.255941 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config" (OuterVolumeSpecName: "config") pod "b830b7a0-7fc3-400d-aaf3-614818b3e833" (UID: "b830b7a0-7fc3-400d-aaf3-614818b3e833"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.255966 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b830b7a0-7fc3-400d-aaf3-614818b3e833" (UID: "b830b7a0-7fc3-400d-aaf3-614818b3e833"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.256551 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.256584 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.256890 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b830b7a0-7fc3-400d-aaf3-614818b3e833" (UID: "b830b7a0-7fc3-400d-aaf3-614818b3e833"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.281197 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs" (OuterVolumeSpecName: "kube-api-access-4bdfs") pod "b830b7a0-7fc3-400d-aaf3-614818b3e833" (UID: "b830b7a0-7fc3-400d-aaf3-614818b3e833"). InnerVolumeSpecName "kube-api-access-4bdfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.358044 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bdfs\" (UniqueName: \"kubernetes.io/projected/b830b7a0-7fc3-400d-aaf3-614818b3e833-kube-api-access-4bdfs\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.358069 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b830b7a0-7fc3-400d-aaf3-614818b3e833-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.368257 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.319927855 podStartE2EDuration="34.368233865s" podCreationTimestamp="2025-10-11 03:01:26 +0000 UTC" firstStartedPulling="2025-10-11 03:01:38.379288052 +0000 UTC m=+909.312375696" lastFinishedPulling="2025-10-11 03:01:47.427594062 +0000 UTC m=+918.360681706" observedRunningTime="2025-10-11 03:02:00.154141249 +0000 UTC m=+931.087228893" watchObservedRunningTime="2025-10-11 03:02:00.368233865 +0000 UTC m=+931.301321509" Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.377886 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-mbwl2"] Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.493976 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 11 03:02:00 crc kubenswrapper[4953]: I1011 03:02:00.589936 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.102525 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5d8bc190-9bc5-4b0c-8592-6b31362c4783","Type":"ContainerStarted","Data":"72e0eef0aaf7a11fcafcd97c9967ba08cd359c20eefa8fe0656e3cf40fe6b624"} Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.104076 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-mbwl2" 
event={"ID":"9284d870-067e-4af0-98b7-b57e976c7a91","Type":"ContainerStarted","Data":"743f646469b85e5c8e244b411ab16e756ae3e895ec6319d027265b112fc024f2"} Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.104095 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-mbwl2" event={"ID":"9284d870-067e-4af0-98b7-b57e976c7a91","Type":"ContainerStarted","Data":"45129480aa6773db0e6f0c4a7804681f493166b6e2ce7d8bfa73d48745e58561"} Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.105818 4953 generic.go:334] "Generic (PLEG): container finished" podID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerID="3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd" exitCode=0 Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.105874 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" event={"ID":"e256eb2e-b0cc-4860-bf46-558c5ddbb512","Type":"ContainerDied","Data":"3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd"} Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.105927 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" event={"ID":"e256eb2e-b0cc-4860-bf46-558c5ddbb512","Type":"ContainerStarted","Data":"95b8214a6956434d8fdab1364231757dc52d885f429f60d53259b2231f057a75"} Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.105897 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-bw8zz" Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.147803 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-mbwl2" podStartSLOduration=2.147770577 podStartE2EDuration="2.147770577s" podCreationTimestamp="2025-10-11 03:01:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:02:01.127252063 +0000 UTC m=+932.060339747" watchObservedRunningTime="2025-10-11 03:02:01.147770577 +0000 UTC m=+932.080858231" Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.268426 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-bw8zz"] Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.290190 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-bw8zz"] Oct 11 03:02:01 crc kubenswrapper[4953]: I1011 03:02:01.821085 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b830b7a0-7fc3-400d-aaf3-614818b3e833" path="/var/lib/kubelet/pods/b830b7a0-7fc3-400d-aaf3-614818b3e833/volumes" Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.114266 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" event={"ID":"e256eb2e-b0cc-4860-bf46-558c5ddbb512","Type":"ContainerStarted","Data":"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119"} Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.114381 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.118872 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5d8bc190-9bc5-4b0c-8592-6b31362c4783","Type":"ContainerStarted","Data":"b0595af66700fe549dbab68f3deca686f94357a5ee1ee872718c1ab9cd0ab45a"} Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.118917 4953 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"5d8bc190-9bc5-4b0c-8592-6b31362c4783","Type":"ContainerStarted","Data":"c96718621ac7b8d875207c5a264f6db77ac156dfe0f551229fcefd666fab8907"} Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.137506 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" podStartSLOduration=3.137491706 podStartE2EDuration="3.137491706s" podCreationTimestamp="2025-10-11 03:01:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:02:02.133238647 +0000 UTC m=+933.066326311" watchObservedRunningTime="2025-10-11 03:02:02.137491706 +0000 UTC m=+933.070579350" Oct 11 03:02:02 crc kubenswrapper[4953]: I1011 03:02:02.159704 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.950489341 podStartE2EDuration="3.159686152s" podCreationTimestamp="2025-10-11 03:01:59 +0000 UTC" firstStartedPulling="2025-10-11 03:02:00.518554723 +0000 UTC m=+931.451642367" lastFinishedPulling="2025-10-11 03:02:01.727751524 +0000 UTC m=+932.660839178" observedRunningTime="2025-10-11 03:02:02.159521848 +0000 UTC m=+933.092609512" watchObservedRunningTime="2025-10-11 03:02:02.159686152 +0000 UTC m=+933.092773796" Oct 11 03:02:03 crc kubenswrapper[4953]: I1011 03:02:03.127955 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 11 03:02:06 crc kubenswrapper[4953]: I1011 03:02:06.905568 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 11 03:02:06 crc kubenswrapper[4953]: I1011 03:02:06.906164 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 11 03:02:06 crc kubenswrapper[4953]: I1011 03:02:06.965903 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 11 03:02:07 crc kubenswrapper[4953]: I1011 03:02:07.238052 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.117404 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-cb9dw"] Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.118775 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cb9dw" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.135745 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cb9dw"] Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.224580 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j488\" (UniqueName: \"kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488\") pod \"keystone-db-create-cb9dw\" (UID: \"f3131a25-a541-407f-911d-95c20d7f368a\") " pod="openstack/keystone-db-create-cb9dw" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.254126 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.254544 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.277632 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-swwlm"] Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.278663 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-swwlm" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.289565 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-swwlm"] Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.308292 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.328170 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8k4c\" (UniqueName: \"kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c\") pod \"placement-db-create-swwlm\" (UID: \"9c1b94b5-2f83-482e-a795-1e7b307f168e\") " pod="openstack/placement-db-create-swwlm" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.328292 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j488\" (UniqueName: \"kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488\") pod \"keystone-db-create-cb9dw\" (UID: \"f3131a25-a541-407f-911d-95c20d7f368a\") " pod="openstack/keystone-db-create-cb9dw" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.353782 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j488\" (UniqueName: \"kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488\") pod \"keystone-db-create-cb9dw\" (UID: \"f3131a25-a541-407f-911d-95c20d7f368a\") " pod="openstack/keystone-db-create-cb9dw" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.429491 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8k4c\" (UniqueName: \"kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c\") pod \"placement-db-create-swwlm\" (UID: \"9c1b94b5-2f83-482e-a795-1e7b307f168e\") " pod="openstack/placement-db-create-swwlm" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.447805 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8k4c\" (UniqueName: \"kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c\") pod 
\"placement-db-create-swwlm\" (UID: \"9c1b94b5-2f83-482e-a795-1e7b307f168e\") " pod="openstack/placement-db-create-swwlm" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.473887 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cb9dw" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.594073 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-swwlm" Oct 11 03:02:08 crc kubenswrapper[4953]: I1011 03:02:08.974460 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cb9dw"] Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.065888 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-swwlm"] Oct 11 03:02:09 crc kubenswrapper[4953]: W1011 03:02:09.092150 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c1b94b5_2f83_482e_a795_1e7b307f168e.slice/crio-c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490 WatchSource:0}: Error finding container c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490: Status 404 returned error can't find the container with id c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490 Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.185879 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cb9dw" event={"ID":"f3131a25-a541-407f-911d-95c20d7f368a","Type":"ContainerStarted","Data":"1fa8d3fcc2972616b5bf2f8807a67de77ca6ef3f2002c9551329f47e547c49f4"} Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.185940 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cb9dw" event={"ID":"f3131a25-a541-407f-911d-95c20d7f368a","Type":"ContainerStarted","Data":"6bde9bcd45db104f0f1506f2ab6d7264e7e9448d9d25c5ead5f10534087ed477"} Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.191350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-swwlm" event={"ID":"9c1b94b5-2f83-482e-a795-1e7b307f168e","Type":"ContainerStarted","Data":"c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490"} Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.248214 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 11 03:02:09 crc kubenswrapper[4953]: I1011 03:02:09.275568 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-cb9dw" podStartSLOduration=1.27554229 podStartE2EDuration="1.27554229s" podCreationTimestamp="2025-10-11 03:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:02:09.207039781 +0000 UTC m=+940.140127485" watchObservedRunningTime="2025-10-11 03:02:09.27554229 +0000 UTC m=+940.208629974" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.053125 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.126993 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"] Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.127262 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" 
podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="dnsmasq-dns" containerID="cri-o://c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4" gracePeriod=10 Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.200146 4953 generic.go:334] "Generic (PLEG): container finished" podID="f3131a25-a541-407f-911d-95c20d7f368a" containerID="1fa8d3fcc2972616b5bf2f8807a67de77ca6ef3f2002c9551329f47e547c49f4" exitCode=0 Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.200212 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cb9dw" event={"ID":"f3131a25-a541-407f-911d-95c20d7f368a","Type":"ContainerDied","Data":"1fa8d3fcc2972616b5bf2f8807a67de77ca6ef3f2002c9551329f47e547c49f4"} Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.201444 4953 generic.go:334] "Generic (PLEG): container finished" podID="9c1b94b5-2f83-482e-a795-1e7b307f168e" containerID="9960828f480b3dfcb61f958e4556aee0dffda17587c2e6456f930519ebf59620" exitCode=0 Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.201519 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-swwlm" event={"ID":"9c1b94b5-2f83-482e-a795-1e7b307f168e","Type":"ContainerDied","Data":"9960828f480b3dfcb61f958e4556aee0dffda17587c2e6456f930519ebf59620"} Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.685994 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.776027 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc\") pod \"16ceccb6-50bc-47e9-83c9-82117806aa76\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.776083 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config\") pod \"16ceccb6-50bc-47e9-83c9-82117806aa76\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.776106 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jx5f\" (UniqueName: \"kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f\") pod \"16ceccb6-50bc-47e9-83c9-82117806aa76\" (UID: \"16ceccb6-50bc-47e9-83c9-82117806aa76\") " Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.781733 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f" (OuterVolumeSpecName: "kube-api-access-6jx5f") pod "16ceccb6-50bc-47e9-83c9-82117806aa76" (UID: "16ceccb6-50bc-47e9-83c9-82117806aa76"). InnerVolumeSpecName "kube-api-access-6jx5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.815337 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config" (OuterVolumeSpecName: "config") pod "16ceccb6-50bc-47e9-83c9-82117806aa76" (UID: "16ceccb6-50bc-47e9-83c9-82117806aa76"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.830325 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "16ceccb6-50bc-47e9-83c9-82117806aa76" (UID: "16ceccb6-50bc-47e9-83c9-82117806aa76"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.877193 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.877229 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16ceccb6-50bc-47e9-83c9-82117806aa76-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:10 crc kubenswrapper[4953]: I1011 03:02:10.877238 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jx5f\" (UniqueName: \"kubernetes.io/projected/16ceccb6-50bc-47e9-83c9-82117806aa76-kube-api-access-6jx5f\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.211548 4953 generic.go:334] "Generic (PLEG): container finished" podID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerID="c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4" exitCode=0 Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.211635 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.211685 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" event={"ID":"16ceccb6-50bc-47e9-83c9-82117806aa76","Type":"ContainerDied","Data":"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4"} Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.211730 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-g5dmm" event={"ID":"16ceccb6-50bc-47e9-83c9-82117806aa76","Type":"ContainerDied","Data":"b5a21a2d162472c2aa7355fe64b0aa90caed604a832a33e260364f0c64cfd892"} Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.211759 4953 scope.go:117] "RemoveContainer" containerID="c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4" Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.263954 4953 scope.go:117] "RemoveContainer" containerID="36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd" Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.298996 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"] Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.321727 4953 scope.go:117] "RemoveContainer" containerID="c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4" Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.322536 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.324395 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-g5dmm"] 
Oct 11 03:02:11 crc kubenswrapper[4953]: E1011 03:02:11.324550 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4\": container with ID starting with c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4 not found: ID does not exist" containerID="c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.322585 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.324595 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4"} err="failed to get container status \"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4\": rpc error: code = NotFound desc = could not find container \"c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4\": container with ID starting with c53809e3394db6a587e073c81a61141f3015c0eea9204f186fafb66dae3bcab4 not found: ID does not exist"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.356656 4953 scope.go:117] "RemoveContainer" containerID="36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd"
Oct 11 03:02:11 crc kubenswrapper[4953]: E1011 03:02:11.365196 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd\": container with ID starting with 36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd not found: ID does not exist" containerID="36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.365253 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd"} err="failed to get container status \"36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd\": rpc error: code = NotFound desc = could not find container \"36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd\": container with ID starting with 36dbbf547a34bb7dc69800f3aa587dd6664861cf41db989186c516d80830cffd not found: ID does not exist"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.617824 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cb9dw"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.722258 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j488\" (UniqueName: \"kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488\") pod \"f3131a25-a541-407f-911d-95c20d7f368a\" (UID: \"f3131a25-a541-407f-911d-95c20d7f368a\") "
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.727689 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488" (OuterVolumeSpecName: "kube-api-access-2j488") pod "f3131a25-a541-407f-911d-95c20d7f368a" (UID: "f3131a25-a541-407f-911d-95c20d7f368a"). InnerVolumeSpecName "kube-api-access-2j488". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.766389 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-swwlm"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.803779 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" path="/var/lib/kubelet/pods/16ceccb6-50bc-47e9-83c9-82117806aa76/volumes"
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.828019 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j488\" (UniqueName: \"kubernetes.io/projected/f3131a25-a541-407f-911d-95c20d7f368a-kube-api-access-2j488\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.929116 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8k4c\" (UniqueName: \"kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c\") pod \"9c1b94b5-2f83-482e-a795-1e7b307f168e\" (UID: \"9c1b94b5-2f83-482e-a795-1e7b307f168e\") "
Oct 11 03:02:11 crc kubenswrapper[4953]: I1011 03:02:11.932051 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c" (OuterVolumeSpecName: "kube-api-access-c8k4c") pod "9c1b94b5-2f83-482e-a795-1e7b307f168e" (UID: "9c1b94b5-2f83-482e-a795-1e7b307f168e"). InnerVolumeSpecName "kube-api-access-c8k4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.032089 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8k4c\" (UniqueName: \"kubernetes.io/projected/9c1b94b5-2f83-482e-a795-1e7b307f168e-kube-api-access-c8k4c\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.226049 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cb9dw" event={"ID":"f3131a25-a541-407f-911d-95c20d7f368a","Type":"ContainerDied","Data":"6bde9bcd45db104f0f1506f2ab6d7264e7e9448d9d25c5ead5f10534087ed477"}
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.226087 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6bde9bcd45db104f0f1506f2ab6d7264e7e9448d9d25c5ead5f10534087ed477"
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.226096 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cb9dw"
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.227351 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-swwlm" event={"ID":"9c1b94b5-2f83-482e-a795-1e7b307f168e","Type":"ContainerDied","Data":"c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490"}
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.227384 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-swwlm"
Oct 11 03:02:12 crc kubenswrapper[4953]: I1011 03:02:12.227394 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8f24c15ab35463f4efd5e1f779085b6bb5641b6e0ede3db2178b7b4ec404490"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.580314 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-zlg2z"]
Oct 11 03:02:13 crc kubenswrapper[4953]: E1011 03:02:13.581349 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="dnsmasq-dns"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581379 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="dnsmasq-dns"
Oct 11 03:02:13 crc kubenswrapper[4953]: E1011 03:02:13.581429 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="init"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581444 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="init"
Oct 11 03:02:13 crc kubenswrapper[4953]: E1011 03:02:13.581468 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3131a25-a541-407f-911d-95c20d7f368a" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581481 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3131a25-a541-407f-911d-95c20d7f368a" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: E1011 03:02:13.581513 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1b94b5-2f83-482e-a795-1e7b307f168e" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581526 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1b94b5-2f83-482e-a795-1e7b307f168e" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581886 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3131a25-a541-407f-911d-95c20d7f368a" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.581944 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1b94b5-2f83-482e-a795-1e7b307f168e" containerName="mariadb-database-create"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.582007 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ceccb6-50bc-47e9-83c9-82117806aa76" containerName="dnsmasq-dns"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.583273 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.597881 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zlg2z"]
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.757031 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbtj6\" (UniqueName: \"kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6\") pod \"glance-db-create-zlg2z\" (UID: \"8ff743f4-1191-4f96-aa4d-3863163cd0a3\") " pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.857954 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbtj6\" (UniqueName: \"kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6\") pod \"glance-db-create-zlg2z\" (UID: \"8ff743f4-1191-4f96-aa4d-3863163cd0a3\") " pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.877849 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbtj6\" (UniqueName: \"kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6\") pod \"glance-db-create-zlg2z\" (UID: \"8ff743f4-1191-4f96-aa4d-3863163cd0a3\") " pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:13 crc kubenswrapper[4953]: I1011 03:02:13.902758 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:14 crc kubenswrapper[4953]: I1011 03:02:14.375381 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zlg2z"]
Oct 11 03:02:14 crc kubenswrapper[4953]: W1011 03:02:14.395846 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ff743f4_1191_4f96_aa4d_3863163cd0a3.slice/crio-931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166 WatchSource:0}: Error finding container 931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166: Status 404 returned error can't find the container with id 931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166
Oct 11 03:02:15 crc kubenswrapper[4953]: I1011 03:02:15.085318 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Oct 11 03:02:15 crc kubenswrapper[4953]: I1011 03:02:15.256845 4953 generic.go:334] "Generic (PLEG): container finished" podID="8ff743f4-1191-4f96-aa4d-3863163cd0a3" containerID="bd717d659eff966309fdb8c04428b96b16b9c97691315c49476a8ebd69a0337f" exitCode=0
Oct 11 03:02:15 crc kubenswrapper[4953]: I1011 03:02:15.256900 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zlg2z" event={"ID":"8ff743f4-1191-4f96-aa4d-3863163cd0a3","Type":"ContainerDied","Data":"bd717d659eff966309fdb8c04428b96b16b9c97691315c49476a8ebd69a0337f"}
Oct 11 03:02:15 crc kubenswrapper[4953]: I1011 03:02:15.256941 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zlg2z" event={"ID":"8ff743f4-1191-4f96-aa4d-3863163cd0a3","Type":"ContainerStarted","Data":"931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166"}
Oct 11 03:02:16 crc kubenswrapper[4953]: I1011 03:02:16.629762 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:16 crc kubenswrapper[4953]: I1011 03:02:16.808721 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbtj6\" (UniqueName: \"kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6\") pod \"8ff743f4-1191-4f96-aa4d-3863163cd0a3\" (UID: \"8ff743f4-1191-4f96-aa4d-3863163cd0a3\") "
Oct 11 03:02:16 crc kubenswrapper[4953]: I1011 03:02:16.821905 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6" (OuterVolumeSpecName: "kube-api-access-hbtj6") pod "8ff743f4-1191-4f96-aa4d-3863163cd0a3" (UID: "8ff743f4-1191-4f96-aa4d-3863163cd0a3"). InnerVolumeSpecName "kube-api-access-hbtj6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:16 crc kubenswrapper[4953]: I1011 03:02:16.914710 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbtj6\" (UniqueName: \"kubernetes.io/projected/8ff743f4-1191-4f96-aa4d-3863163cd0a3-kube-api-access-hbtj6\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:17 crc kubenswrapper[4953]: I1011 03:02:17.279433 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zlg2z" event={"ID":"8ff743f4-1191-4f96-aa4d-3863163cd0a3","Type":"ContainerDied","Data":"931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166"}
Oct 11 03:02:17 crc kubenswrapper[4953]: I1011 03:02:17.279461 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zlg2z"
Oct 11 03:02:17 crc kubenswrapper[4953]: I1011 03:02:17.279479 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="931669acd37d726b54fc08f883a571b119c507bd8df9f1291ab30014677d6166"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.243908 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-51a6-account-create-pt5kn"]
Oct 11 03:02:18 crc kubenswrapper[4953]: E1011 03:02:18.244375 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff743f4-1191-4f96-aa4d-3863163cd0a3" containerName="mariadb-database-create"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.244399 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff743f4-1191-4f96-aa4d-3863163cd0a3" containerName="mariadb-database-create"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.244710 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff743f4-1191-4f96-aa4d-3863163cd0a3" containerName="mariadb-database-create"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.245454 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.247935 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.252208 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-51a6-account-create-pt5kn"]
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.337054 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r8kw\" (UniqueName: \"kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw\") pod \"keystone-51a6-account-create-pt5kn\" (UID: \"0fa0f95a-70f4-4ab2-9f09-85506fb7160b\") " pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.413017 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-d9a5-account-create-cgdnn"]
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.414204 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.416487 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.419349 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d9a5-account-create-cgdnn"]
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.451481 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r8kw\" (UniqueName: \"kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw\") pod \"keystone-51a6-account-create-pt5kn\" (UID: \"0fa0f95a-70f4-4ab2-9f09-85506fb7160b\") " pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.469299 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r8kw\" (UniqueName: \"kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw\") pod \"keystone-51a6-account-create-pt5kn\" (UID: \"0fa0f95a-70f4-4ab2-9f09-85506fb7160b\") " pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.553727 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzr26\" (UniqueName: \"kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26\") pod \"placement-d9a5-account-create-cgdnn\" (UID: \"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6\") " pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.561406 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.656070 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzr26\" (UniqueName: \"kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26\") pod \"placement-d9a5-account-create-cgdnn\" (UID: \"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6\") " pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.680228 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzr26\" (UniqueName: \"kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26\") pod \"placement-d9a5-account-create-cgdnn\" (UID: \"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6\") " pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.753420 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:18 crc kubenswrapper[4953]: I1011 03:02:18.998474 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-51a6-account-create-pt5kn"]
Oct 11 03:02:19 crc kubenswrapper[4953]: I1011 03:02:19.216165 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-d9a5-account-create-cgdnn"]
Oct 11 03:02:19 crc kubenswrapper[4953]: W1011 03:02:19.221725 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b4d1d54_cdb9_4d30_90e6_23d0ac44e1c6.slice/crio-142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4 WatchSource:0}: Error finding container 142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4: Status 404 returned error can't find the container with id 142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4
Oct 11 03:02:19 crc kubenswrapper[4953]: I1011 03:02:19.295450 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d9a5-account-create-cgdnn" event={"ID":"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6","Type":"ContainerStarted","Data":"142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4"}
Oct 11 03:02:19 crc kubenswrapper[4953]: I1011 03:02:19.297073 4953 generic.go:334] "Generic (PLEG): container finished" podID="0fa0f95a-70f4-4ab2-9f09-85506fb7160b" containerID="f81bb8c88b11c870e065cdf1143bf7b57e272188b07b7bffbd3b23e6d03061e3" exitCode=0
Oct 11 03:02:19 crc kubenswrapper[4953]: I1011 03:02:19.297120 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-51a6-account-create-pt5kn" event={"ID":"0fa0f95a-70f4-4ab2-9f09-85506fb7160b","Type":"ContainerDied","Data":"f81bb8c88b11c870e065cdf1143bf7b57e272188b07b7bffbd3b23e6d03061e3"}
Oct 11 03:02:19 crc kubenswrapper[4953]: I1011 03:02:19.297147 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-51a6-account-create-pt5kn" event={"ID":"0fa0f95a-70f4-4ab2-9f09-85506fb7160b","Type":"ContainerStarted","Data":"e3356d2ea50a85bc30ab6153f0d4e7cb74829e4c74aea7e1cb085f1f929bdcc2"}
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.305988 4953 generic.go:334] "Generic (PLEG): container finished" podID="2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" containerID="22b262232bab65ba935db174a446585ea647293af26a4516ef93354da9ae05a5" exitCode=0
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.306077 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d9a5-account-create-cgdnn" event={"ID":"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6","Type":"ContainerDied","Data":"22b262232bab65ba935db174a446585ea647293af26a4516ef93354da9ae05a5"}
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.641961 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.804992 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r8kw\" (UniqueName: \"kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw\") pod \"0fa0f95a-70f4-4ab2-9f09-85506fb7160b\" (UID: \"0fa0f95a-70f4-4ab2-9f09-85506fb7160b\") "
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.810804 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw" (OuterVolumeSpecName: "kube-api-access-4r8kw") pod "0fa0f95a-70f4-4ab2-9f09-85506fb7160b" (UID: "0fa0f95a-70f4-4ab2-9f09-85506fb7160b"). InnerVolumeSpecName "kube-api-access-4r8kw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:20 crc kubenswrapper[4953]: I1011 03:02:20.907056 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r8kw\" (UniqueName: \"kubernetes.io/projected/0fa0f95a-70f4-4ab2-9f09-85506fb7160b-kube-api-access-4r8kw\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.315132 4953 generic.go:334] "Generic (PLEG): container finished" podID="54677831-1449-4579-8948-fbf874123d6b" containerID="dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734" exitCode=0
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.315212 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerDied","Data":"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734"}
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.316460 4953 generic.go:334] "Generic (PLEG): container finished" podID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerID="d6d046c528c7f57df75d9528dedfb6f98dc3d824395bda9180e62ca01e3c4d2d" exitCode=0
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.316522 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerDied","Data":"d6d046c528c7f57df75d9528dedfb6f98dc3d824395bda9180e62ca01e3c4d2d"}
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.320379 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-51a6-account-create-pt5kn"
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.320471 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-51a6-account-create-pt5kn" event={"ID":"0fa0f95a-70f4-4ab2-9f09-85506fb7160b","Type":"ContainerDied","Data":"e3356d2ea50a85bc30ab6153f0d4e7cb74829e4c74aea7e1cb085f1f929bdcc2"}
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.320513 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3356d2ea50a85bc30ab6153f0d4e7cb74829e4c74aea7e1cb085f1f929bdcc2"
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.641805 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.821319 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzr26\" (UniqueName: \"kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26\") pod \"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6\" (UID: \"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6\") "
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.826445 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26" (OuterVolumeSpecName: "kube-api-access-bzr26") pod "2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" (UID: "2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6"). InnerVolumeSpecName "kube-api-access-bzr26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:21 crc kubenswrapper[4953]: I1011 03:02:21.923565 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzr26\" (UniqueName: \"kubernetes.io/projected/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6-kube-api-access-bzr26\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.331474 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-d9a5-account-create-cgdnn" event={"ID":"2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6","Type":"ContainerDied","Data":"142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4"}
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.333169 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="142be85c115956f1b269aa237d80c5c4ffb912c06a6d562bb5c600d2b142c4c4"
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.331734 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-d9a5-account-create-cgdnn"
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.334043 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerStarted","Data":"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276"}
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.334266 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.336094 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerStarted","Data":"1da1315d18c0c92a907d4916fa378294d9ee5d32187162148948337cb623bda6"}
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.336392 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.362635 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=50.604240159 podStartE2EDuration="59.362619613s" podCreationTimestamp="2025-10-11 03:01:23 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.852699162 +0000 UTC m=+908.785786806" lastFinishedPulling="2025-10-11 03:01:46.611078606 +0000 UTC m=+917.544166260" observedRunningTime="2025-10-11 03:02:22.361563497 +0000 UTC m=+953.294651151" watchObservedRunningTime="2025-10-11 03:02:22.362619613 +0000 UTC m=+953.295707257"
Oct 11 03:02:22 crc kubenswrapper[4953]: I1011 03:02:22.681314 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.7585947 podStartE2EDuration="59.681291809s" podCreationTimestamp="2025-10-11 03:01:23 +0000 UTC" firstStartedPulling="2025-10-11 03:01:37.956875581 +0000 UTC m=+908.889963225" lastFinishedPulling="2025-10-11 03:01:46.87957269 +0000 UTC m=+917.812660334" observedRunningTime="2025-10-11 03:02:22.402893242 +0000 UTC m=+953.335980896" watchObservedRunningTime="2025-10-11 03:02:22.681291809 +0000 UTC m=+953.614379463"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.537422 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2mxr7" podUID="e845a96b-97b5-4417-be95-7a4760a84897" containerName="ovn-controller" probeResult="failure" output=<
Oct 11 03:02:23 crc kubenswrapper[4953]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 11 03:02:23 crc kubenswrapper[4953]: >
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.689566 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8905-account-create-lt7np"]
Oct 11 03:02:23 crc kubenswrapper[4953]: E1011 03:02:23.689962 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.689978 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: E1011 03:02:23.689995 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa0f95a-70f4-4ab2-9f09-85506fb7160b" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.690002 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa0f95a-70f4-4ab2-9f09-85506fb7160b" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.690227 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa0f95a-70f4-4ab2-9f09-85506fb7160b" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.690244 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" containerName="mariadb-account-create"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.690838 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.693665 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.709682 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8905-account-create-lt7np"]
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.855413 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwjm2\" (UniqueName: \"kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2\") pod \"glance-8905-account-create-lt7np\" (UID: \"bc95d58b-c39c-4459-8a45-daae75958c99\") " pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.957314 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwjm2\" (UniqueName: \"kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2\") pod \"glance-8905-account-create-lt7np\" (UID: \"bc95d58b-c39c-4459-8a45-daae75958c99\") " pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:23 crc kubenswrapper[4953]: I1011 03:02:23.986344 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwjm2\" (UniqueName: \"kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2\") pod \"glance-8905-account-create-lt7np\" (UID: \"bc95d58b-c39c-4459-8a45-daae75958c99\") " pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:24 crc kubenswrapper[4953]: I1011 03:02:24.005526 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:24 crc kubenswrapper[4953]: W1011 03:02:24.476334 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc95d58b_c39c_4459_8a45_daae75958c99.slice/crio-98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed WatchSource:0}: Error finding container 98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed: Status 404 returned error can't find the container with id 98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed
Oct 11 03:02:24 crc kubenswrapper[4953]: I1011 03:02:24.482502 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8905-account-create-lt7np"]
Oct 11 03:02:25 crc kubenswrapper[4953]: I1011 03:02:25.358364 4953 generic.go:334] "Generic (PLEG): container finished" podID="bc95d58b-c39c-4459-8a45-daae75958c99" containerID="5991c895b46e2ae00c9f586a26af25cedba7283d65543a7fb9f486bd8ba39bfa" exitCode=0
Oct 11 03:02:25 crc kubenswrapper[4953]: I1011 03:02:25.358444 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8905-account-create-lt7np" event={"ID":"bc95d58b-c39c-4459-8a45-daae75958c99","Type":"ContainerDied","Data":"5991c895b46e2ae00c9f586a26af25cedba7283d65543a7fb9f486bd8ba39bfa"}
Oct 11 03:02:25 crc kubenswrapper[4953]: I1011 03:02:25.359435 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8905-account-create-lt7np" event={"ID":"bc95d58b-c39c-4459-8a45-daae75958c99","Type":"ContainerStarted","Data":"98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed"}
Oct 11 03:02:26 crc kubenswrapper[4953]: I1011 03:02:26.668074 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:26 crc kubenswrapper[4953]: I1011 03:02:26.803131 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwjm2\" (UniqueName: \"kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2\") pod \"bc95d58b-c39c-4459-8a45-daae75958c99\" (UID: \"bc95d58b-c39c-4459-8a45-daae75958c99\") "
Oct 11 03:02:26 crc kubenswrapper[4953]: I1011 03:02:26.817392 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2" (OuterVolumeSpecName: "kube-api-access-zwjm2") pod "bc95d58b-c39c-4459-8a45-daae75958c99" (UID: "bc95d58b-c39c-4459-8a45-daae75958c99"). InnerVolumeSpecName "kube-api-access-zwjm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:02:26 crc kubenswrapper[4953]: I1011 03:02:26.906174 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwjm2\" (UniqueName: \"kubernetes.io/projected/bc95d58b-c39c-4459-8a45-daae75958c99-kube-api-access-zwjm2\") on node \"crc\" DevicePath \"\""
Oct 11 03:02:27 crc kubenswrapper[4953]: I1011 03:02:27.374302 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8905-account-create-lt7np" event={"ID":"bc95d58b-c39c-4459-8a45-daae75958c99","Type":"ContainerDied","Data":"98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed"}
Oct 11 03:02:27 crc kubenswrapper[4953]: I1011 03:02:27.374693 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98c71975c05c524ea6bb5c7a25e7fe2c98ba3fb6c05a7bbc0196cf52a1d299ed"
Oct 11 03:02:27 crc kubenswrapper[4953]: I1011 03:02:27.374333 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8905-account-create-lt7np"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.565428 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-w92fd"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.568647 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-w92fd"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.588480 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2mxr7" podUID="e845a96b-97b5-4417-be95-7a4760a84897" containerName="ovn-controller" probeResult="failure" output=<
Oct 11 03:02:28 crc kubenswrapper[4953]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Oct 11 03:02:28 crc kubenswrapper[4953]: >
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.818195 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2mxr7-config-z5prv"]
Oct 11 03:02:28 crc kubenswrapper[4953]: E1011 03:02:28.818672 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc95d58b-c39c-4459-8a45-daae75958c99" containerName="mariadb-account-create"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.818699 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc95d58b-c39c-4459-8a45-daae75958c99" containerName="mariadb-account-create"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.818921 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc95d58b-c39c-4459-8a45-daae75958c99" containerName="mariadb-account-create"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.819577 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.824872 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.825432 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7-config-z5prv"]
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.857779 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-pkkj5"]
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.858847 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-pkkj5"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.864391 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-87rn5"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.864599 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.875888 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-pkkj5"]
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937190 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937258 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937336 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937351 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937400 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg4w7\" (UniqueName: \"kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:28 crc kubenswrapper[4953]: I1011 03:02:28.937478 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038349 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg4w7\" (UniqueName: \"kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038422 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038464 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038518 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038550 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038575 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-426ss\" (UniqueName: \"kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038597 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038637 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038653 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038669 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.038977 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.039386 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.039463 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.039503 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.040680 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.061322 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg4w7\" (UniqueName: \"kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7\") pod \"ovn-controller-2mxr7-config-z5prv\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " pod="openstack/ovn-controller-2mxr7-config-z5prv"
Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.137926 4953 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-z5prv" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.139445 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.139547 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.139639 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-426ss\" (UniqueName: \"kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.139669 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.143728 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.144404 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.147329 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.157772 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-426ss\" (UniqueName: \"kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss\") pod \"glance-db-sync-pkkj5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.179204 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-pkkj5" Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.427019 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7-config-z5prv"] Oct 11 03:02:29 crc kubenswrapper[4953]: I1011 03:02:29.762982 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-pkkj5"] Oct 11 03:02:29 crc kubenswrapper[4953]: W1011 03:02:29.773652 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod485ebf6f_d6da_48bf_9d6a_cb353d9082f5.slice/crio-8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573 WatchSource:0}: Error finding container 8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573: Status 404 returned error can't find the container with id 8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573 Oct 11 03:02:30 crc kubenswrapper[4953]: I1011 03:02:30.403824 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pkkj5" event={"ID":"485ebf6f-d6da-48bf-9d6a-cb353d9082f5","Type":"ContainerStarted","Data":"8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573"} Oct 11 03:02:30 crc kubenswrapper[4953]: I1011 03:02:30.405925 4953 generic.go:334] "Generic (PLEG): container finished" podID="fcb68216-af3d-4d8d-ba8c-344ca102474c" containerID="f8e175cff7befb9ea2cd0bab4a0a73abbfddc321e48177ee6ab9373439368529" exitCode=0 Oct 11 03:02:30 crc kubenswrapper[4953]: I1011 03:02:30.405985 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-z5prv" event={"ID":"fcb68216-af3d-4d8d-ba8c-344ca102474c","Type":"ContainerDied","Data":"f8e175cff7befb9ea2cd0bab4a0a73abbfddc321e48177ee6ab9373439368529"} Oct 11 03:02:30 crc kubenswrapper[4953]: I1011 03:02:30.406023 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-z5prv" event={"ID":"fcb68216-af3d-4d8d-ba8c-344ca102474c","Type":"ContainerStarted","Data":"97aff9328c698ddff6f550179352517ce65c7f5e556be452e50674557b5971c2"} Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.690929 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-z5prv" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.784871 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785076 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785156 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785204 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785231 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785261 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg4w7\" (UniqueName: \"kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7\") pod \"fcb68216-af3d-4d8d-ba8c-344ca102474c\" (UID: \"fcb68216-af3d-4d8d-ba8c-344ca102474c\") " Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785279 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run" (OuterVolumeSpecName: "var-run") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785303 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785307 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785620 4953 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785647 4953 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785668 4953 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fcb68216-af3d-4d8d-ba8c-344ca102474c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785598 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.785989 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts" (OuterVolumeSpecName: "scripts") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.798820 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7" (OuterVolumeSpecName: "kube-api-access-kg4w7") pod "fcb68216-af3d-4d8d-ba8c-344ca102474c" (UID: "fcb68216-af3d-4d8d-ba8c-344ca102474c"). InnerVolumeSpecName "kube-api-access-kg4w7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.887832 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.887877 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg4w7\" (UniqueName: \"kubernetes.io/projected/fcb68216-af3d-4d8d-ba8c-344ca102474c-kube-api-access-kg4w7\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:31 crc kubenswrapper[4953]: I1011 03:02:31.887888 4953 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fcb68216-af3d-4d8d-ba8c-344ca102474c-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.422038 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-z5prv" event={"ID":"fcb68216-af3d-4d8d-ba8c-344ca102474c","Type":"ContainerDied","Data":"97aff9328c698ddff6f550179352517ce65c7f5e556be452e50674557b5971c2"} Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.422515 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97aff9328c698ddff6f550179352517ce65c7f5e556be452e50674557b5971c2" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.422102 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-z5prv" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.785151 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2mxr7-config-z5prv"] Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.797129 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2mxr7-config-z5prv"] Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.830368 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2mxr7-config-kwsdc"] Oct 11 03:02:32 crc kubenswrapper[4953]: E1011 03:02:32.830749 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb68216-af3d-4d8d-ba8c-344ca102474c" containerName="ovn-config" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.830764 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb68216-af3d-4d8d-ba8c-344ca102474c" containerName="ovn-config" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.830970 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb68216-af3d-4d8d-ba8c-344ca102474c" containerName="ovn-config" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.831514 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.834775 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 11 03:02:32 crc kubenswrapper[4953]: I1011 03:02:32.842856 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7-config-kwsdc"] Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.003931 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.004000 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.004088 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqz4z\" (UniqueName: \"kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.004127 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.004285 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.004367 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106338 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106429 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106452 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106492 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106520 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.106552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqz4z\" (UniqueName: \"kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.107029 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.107032 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.107165 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.107509 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.108232 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.137813 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqz4z\" (UniqueName: \"kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z\") pod \"ovn-controller-2mxr7-config-kwsdc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.152625 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.522545 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-2mxr7" Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.624456 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2mxr7-config-kwsdc"] Oct 11 03:02:33 crc kubenswrapper[4953]: I1011 03:02:33.806797 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb68216-af3d-4d8d-ba8c-344ca102474c" path="/var/lib/kubelet/pods/fcb68216-af3d-4d8d-ba8c-344ca102474c/volumes" Oct 11 03:02:34 crc kubenswrapper[4953]: I1011 03:02:34.446282 4953 generic.go:334] "Generic (PLEG): container finished" podID="a9d3c338-2efa-430c-9788-282e15a37bfc" containerID="d81deedc7cc8170421f7b175d6b1b18ca838616b25e7c1525e3135db5803f57a" exitCode=0 Oct 11 03:02:34 crc kubenswrapper[4953]: I1011 03:02:34.446334 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-kwsdc" event={"ID":"a9d3c338-2efa-430c-9788-282e15a37bfc","Type":"ContainerDied","Data":"d81deedc7cc8170421f7b175d6b1b18ca838616b25e7c1525e3135db5803f57a"} Oct 11 03:02:34 crc kubenswrapper[4953]: I1011 03:02:34.446377 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-kwsdc" event={"ID":"a9d3c338-2efa-430c-9788-282e15a37bfc","Type":"ContainerStarted","Data":"6d6dc1e96fdd6fa952ddfb79fb1c56f97efc05f6b5e1944b2c15021dd5ba47c2"} Oct 11 03:02:34 crc kubenswrapper[4953]: I1011 03:02:34.674853 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:02:34 crc kubenswrapper[4953]: I1011 03:02:34.955842 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.403759 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-579kj"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.413656 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-579kj" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.414610 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-579kj"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.511028 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-phsz2"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.521360 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.522667 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-phsz2"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.569156 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cs6j\" (UniqueName: \"kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j\") pod \"cinder-db-create-579kj\" (UID: \"49cfe7db-879e-49c5-a881-df7508c56004\") " pod="openstack/cinder-db-create-579kj" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.671130 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cs6j\" (UniqueName: \"kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j\") pod \"cinder-db-create-579kj\" (UID: \"49cfe7db-879e-49c5-a881-df7508c56004\") " pod="openstack/cinder-db-create-579kj" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.671252 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f58xz\" (UniqueName: \"kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz\") pod \"barbican-db-create-phsz2\" (UID: \"93572d4d-c90c-484e-a428-47bd0925abba\") " pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.691754 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cs6j\" (UniqueName: \"kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j\") pod \"cinder-db-create-579kj\" (UID: \"49cfe7db-879e-49c5-a881-df7508c56004\") " pod="openstack/cinder-db-create-579kj" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.719064 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rjhfk"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.720413 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.726706 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rjhfk"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.734790 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-579kj" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.772379 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f58xz\" (UniqueName: \"kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz\") pod \"barbican-db-create-phsz2\" (UID: \"93572d4d-c90c-484e-a428-47bd0925abba\") " pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.778383 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-zg5l4"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.779683 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.785049 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.785339 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.785474 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cz5lv" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.785691 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.800770 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f58xz\" (UniqueName: \"kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz\") pod \"barbican-db-create-phsz2\" (UID: \"93572d4d-c90c-484e-a428-47bd0925abba\") " pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.804435 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-zg5l4"] Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.848308 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.873499 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.873548 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.873577 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd645\" (UniqueName: \"kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.873682 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnqxx\" (UniqueName: \"kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx\") pod \"neutron-db-create-rjhfk\" (UID: \"7b7bc820-b928-47b5-83b3-3bca09771317\") " pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.975905 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.976392 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.976422 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd645\" (UniqueName: \"kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.976475 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnqxx\" (UniqueName: \"kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx\") pod \"neutron-db-create-rjhfk\" (UID: \"7b7bc820-b928-47b5-83b3-3bca09771317\") " pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.983569 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.994051 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnqxx\" (UniqueName: \"kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx\") pod \"neutron-db-create-rjhfk\" (UID: \"7b7bc820-b928-47b5-83b3-3bca09771317\") " pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:36 crc kubenswrapper[4953]: I1011 03:02:36.996210 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:37 crc kubenswrapper[4953]: I1011 03:02:37.003843 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd645\" (UniqueName: \"kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645\") pod \"keystone-db-sync-zg5l4\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:37 crc kubenswrapper[4953]: I1011 03:02:37.073708 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:37 crc kubenswrapper[4953]: I1011 03:02:37.123676 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:41 crc kubenswrapper[4953]: I1011 03:02:41.316614 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:02:41 crc kubenswrapper[4953]: I1011 03:02:41.317029 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:02:41 crc kubenswrapper[4953]: I1011 03:02:41.317071 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:02:41 crc kubenswrapper[4953]: I1011 03:02:41.317677 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:02:41 crc kubenswrapper[4953]: I1011 03:02:41.317724 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294" gracePeriod=600 Oct 11 03:02:43 crc kubenswrapper[4953]: I1011 03:02:43.554846 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294" exitCode=0 Oct 11 03:02:43 crc kubenswrapper[4953]: I1011 03:02:43.554961 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294"} Oct 11 03:02:43 crc kubenswrapper[4953]: I1011 03:02:43.555241 4953 scope.go:117] "RemoveContainer" containerID="ac4cee146fbc1ec5589f7b5615f6ba984aa69377969ce0ce9a9209a5d1c58d45" Oct 11 03:02:45 crc kubenswrapper[4953]: E1011 03:02:45.403302 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Oct 11 03:02:45 crc kubenswrapper[4953]: E1011 03:02:45.403944 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-426ss,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-pkkj5_openstack(485ebf6f-d6da-48bf-9d6a-cb353d9082f5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 03:02:45 crc kubenswrapper[4953]: E1011 03:02:45.405200 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-pkkj5" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.587496 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.590230 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2mxr7-config-kwsdc" event={"ID":"a9d3c338-2efa-430c-9788-282e15a37bfc","Type":"ContainerDied","Data":"6d6dc1e96fdd6fa952ddfb79fb1c56f97efc05f6b5e1944b2c15021dd5ba47c2"} Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.590335 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d6dc1e96fdd6fa952ddfb79fb1c56f97efc05f6b5e1944b2c15021dd5ba47c2" Oct 11 03:02:45 crc kubenswrapper[4953]: E1011 03:02:45.593283 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-pkkj5" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.741566 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.741987 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742014 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742061 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqz4z\" (UniqueName: \"kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742078 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.741781 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742153 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run" (OuterVolumeSpecName: "var-run") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). 
InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742122 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run\") pod \"a9d3c338-2efa-430c-9788-282e15a37bfc\" (UID: \"a9d3c338-2efa-430c-9788-282e15a37bfc\") " Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742263 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742965 4953 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.742992 4953 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.743003 4953 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/a9d3c338-2efa-430c-9788-282e15a37bfc-var-run\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.744302 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.744483 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts" (OuterVolumeSpecName: "scripts") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.750946 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z" (OuterVolumeSpecName: "kube-api-access-pqz4z") pod "a9d3c338-2efa-430c-9788-282e15a37bfc" (UID: "a9d3c338-2efa-430c-9788-282e15a37bfc"). InnerVolumeSpecName "kube-api-access-pqz4z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.845181 4953 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.845242 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqz4z\" (UniqueName: \"kubernetes.io/projected/a9d3c338-2efa-430c-9788-282e15a37bfc-kube-api-access-pqz4z\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.845255 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a9d3c338-2efa-430c-9788-282e15a37bfc-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:45 crc kubenswrapper[4953]: I1011 03:02:45.972663 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-zg5l4"] Oct 11 03:02:45 crc kubenswrapper[4953]: W1011 03:02:45.977562 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17ba53d5_3bbc_41d3_98d8_6df81e07dc12.slice/crio-c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626 WatchSource:0}: Error finding container c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626: Status 404 returned error can't find the container with id c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626 Oct 11 03:02:46 crc kubenswrapper[4953]: W1011 03:02:46.054491 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93572d4d_c90c_484e_a428_47bd0925abba.slice/crio-f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8 WatchSource:0}: Error finding container f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8: Status 404 returned error can't find the container with id f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8 Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.056476 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-phsz2"] Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.068475 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rjhfk"] Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.076845 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-579kj"] Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.599758 4953 generic.go:334] "Generic (PLEG): container finished" podID="93572d4d-c90c-484e-a428-47bd0925abba" containerID="999cee9c447248c53ecf119b5c2da3c9ba441c20076fb843035f30b811b85af2" exitCode=0 Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.599807 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-phsz2" event={"ID":"93572d4d-c90c-484e-a428-47bd0925abba","Type":"ContainerDied","Data":"999cee9c447248c53ecf119b5c2da3c9ba441c20076fb843035f30b811b85af2"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.599863 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-phsz2" event={"ID":"93572d4d-c90c-484e-a428-47bd0925abba","Type":"ContainerStarted","Data":"f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.603062 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.605071 4953 generic.go:334] "Generic (PLEG): container finished" podID="7b7bc820-b928-47b5-83b3-3bca09771317" containerID="e9f236b70ec79c0316d27c17c307d607664c6988da9666c242f762c4aa168384" exitCode=0 Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.605129 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rjhfk" event={"ID":"7b7bc820-b928-47b5-83b3-3bca09771317","Type":"ContainerDied","Data":"e9f236b70ec79c0316d27c17c307d607664c6988da9666c242f762c4aa168384"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.605160 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rjhfk" event={"ID":"7b7bc820-b928-47b5-83b3-3bca09771317","Type":"ContainerStarted","Data":"81b50d7d94ebb6f7f136e0821520e489a6fa63ce6a1532f02ecd90ac6c261dd6"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.606141 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zg5l4" event={"ID":"17ba53d5-3bbc-41d3-98d8-6df81e07dc12","Type":"ContainerStarted","Data":"c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.607331 4953 generic.go:334] "Generic (PLEG): container finished" podID="49cfe7db-879e-49c5-a881-df7508c56004" containerID="26fb9333ff0c208fec20bff39d7d613766b4902d27b3b818bf9b2c6e978be2d5" exitCode=0 Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.607368 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-579kj" event={"ID":"49cfe7db-879e-49c5-a881-df7508c56004","Type":"ContainerDied","Data":"26fb9333ff0c208fec20bff39d7d613766b4902d27b3b818bf9b2c6e978be2d5"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.607382 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-579kj" event={"ID":"49cfe7db-879e-49c5-a881-df7508c56004","Type":"ContainerStarted","Data":"66dd8058d73759e7211900ee7bf33d6307f88d1ff927e4e4642f1143e25ca110"} Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.607406 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2mxr7-config-kwsdc" Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.685883 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2mxr7-config-kwsdc"] Oct 11 03:02:46 crc kubenswrapper[4953]: I1011 03:02:46.704042 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2mxr7-config-kwsdc"] Oct 11 03:02:47 crc kubenswrapper[4953]: I1011 03:02:47.804651 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d3c338-2efa-430c-9788-282e15a37bfc" path="/var/lib/kubelet/pods/a9d3c338-2efa-430c-9788-282e15a37bfc/volumes" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.093570 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.102406 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.113053 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-579kj" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.141711 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cs6j\" (UniqueName: \"kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j\") pod \"49cfe7db-879e-49c5-a881-df7508c56004\" (UID: \"49cfe7db-879e-49c5-a881-df7508c56004\") " Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.141861 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f58xz\" (UniqueName: \"kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz\") pod \"93572d4d-c90c-484e-a428-47bd0925abba\" (UID: \"93572d4d-c90c-484e-a428-47bd0925abba\") " Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.141914 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnqxx\" (UniqueName: \"kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx\") pod \"7b7bc820-b928-47b5-83b3-3bca09771317\" (UID: \"7b7bc820-b928-47b5-83b3-3bca09771317\") " Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.149984 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz" (OuterVolumeSpecName: "kube-api-access-f58xz") pod "93572d4d-c90c-484e-a428-47bd0925abba" (UID: "93572d4d-c90c-484e-a428-47bd0925abba"). InnerVolumeSpecName "kube-api-access-f58xz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.151261 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j" (OuterVolumeSpecName: "kube-api-access-8cs6j") pod "49cfe7db-879e-49c5-a881-df7508c56004" (UID: "49cfe7db-879e-49c5-a881-df7508c56004"). InnerVolumeSpecName "kube-api-access-8cs6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.152616 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx" (OuterVolumeSpecName: "kube-api-access-nnqxx") pod "7b7bc820-b928-47b5-83b3-3bca09771317" (UID: "7b7bc820-b928-47b5-83b3-3bca09771317"). InnerVolumeSpecName "kube-api-access-nnqxx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.244239 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f58xz\" (UniqueName: \"kubernetes.io/projected/93572d4d-c90c-484e-a428-47bd0925abba-kube-api-access-f58xz\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.244278 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnqxx\" (UniqueName: \"kubernetes.io/projected/7b7bc820-b928-47b5-83b3-3bca09771317-kube-api-access-nnqxx\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.244288 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cs6j\" (UniqueName: \"kubernetes.io/projected/49cfe7db-879e-49c5-a881-df7508c56004-kube-api-access-8cs6j\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.646999 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-phsz2" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.657030 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-phsz2" event={"ID":"93572d4d-c90c-484e-a428-47bd0925abba","Type":"ContainerDied","Data":"f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8"} Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.657063 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f690de96da407f46642aeed116c4823fafa97a71a83cd3f70ba829002e67dba8" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.660128 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rjhfk" event={"ID":"7b7bc820-b928-47b5-83b3-3bca09771317","Type":"ContainerDied","Data":"81b50d7d94ebb6f7f136e0821520e489a6fa63ce6a1532f02ecd90ac6c261dd6"} Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.660155 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81b50d7d94ebb6f7f136e0821520e489a6fa63ce6a1532f02ecd90ac6c261dd6" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.660213 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rjhfk" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.664010 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zg5l4" event={"ID":"17ba53d5-3bbc-41d3-98d8-6df81e07dc12","Type":"ContainerStarted","Data":"4688b8bc882972856ba44f9bc23cd2d7814f42cdd5ae2608f664e3eb67f8daab"} Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.666775 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-579kj" event={"ID":"49cfe7db-879e-49c5-a881-df7508c56004","Type":"ContainerDied","Data":"66dd8058d73759e7211900ee7bf33d6307f88d1ff927e4e4642f1143e25ca110"} Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.666799 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66dd8058d73759e7211900ee7bf33d6307f88d1ff927e4e4642f1143e25ca110" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.666833 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-579kj" Oct 11 03:02:50 crc kubenswrapper[4953]: I1011 03:02:50.698466 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-zg5l4" podStartSLOduration=10.517895111 podStartE2EDuration="14.698444458s" podCreationTimestamp="2025-10-11 03:02:36 +0000 UTC" firstStartedPulling="2025-10-11 03:02:45.98010278 +0000 UTC m=+976.913190414" lastFinishedPulling="2025-10-11 03:02:50.160652117 +0000 UTC m=+981.093739761" observedRunningTime="2025-10-11 03:02:50.688805232 +0000 UTC m=+981.621892896" watchObservedRunningTime="2025-10-11 03:02:50.698444458 +0000 UTC m=+981.631532092" Oct 11 03:02:53 crc kubenswrapper[4953]: I1011 03:02:53.701339 4953 generic.go:334] "Generic (PLEG): container finished" podID="17ba53d5-3bbc-41d3-98d8-6df81e07dc12" containerID="4688b8bc882972856ba44f9bc23cd2d7814f42cdd5ae2608f664e3eb67f8daab" exitCode=0 Oct 11 03:02:53 crc kubenswrapper[4953]: I1011 03:02:53.701553 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zg5l4" event={"ID":"17ba53d5-3bbc-41d3-98d8-6df81e07dc12","Type":"ContainerDied","Data":"4688b8bc882972856ba44f9bc23cd2d7814f42cdd5ae2608f664e3eb67f8daab"} Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.101305 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.240169 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd645\" (UniqueName: \"kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645\") pod \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.240363 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data\") pod \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.240619 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle\") pod \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\" (UID: \"17ba53d5-3bbc-41d3-98d8-6df81e07dc12\") " Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.248349 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645" (OuterVolumeSpecName: "kube-api-access-kd645") pod "17ba53d5-3bbc-41d3-98d8-6df81e07dc12" (UID: "17ba53d5-3bbc-41d3-98d8-6df81e07dc12"). InnerVolumeSpecName "kube-api-access-kd645". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.268821 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17ba53d5-3bbc-41d3-98d8-6df81e07dc12" (UID: "17ba53d5-3bbc-41d3-98d8-6df81e07dc12"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.284596 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data" (OuterVolumeSpecName: "config-data") pod "17ba53d5-3bbc-41d3-98d8-6df81e07dc12" (UID: "17ba53d5-3bbc-41d3-98d8-6df81e07dc12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.342271 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.342321 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd645\" (UniqueName: \"kubernetes.io/projected/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-kube-api-access-kd645\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.342342 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17ba53d5-3bbc-41d3-98d8-6df81e07dc12-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.723364 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-zg5l4" event={"ID":"17ba53d5-3bbc-41d3-98d8-6df81e07dc12","Type":"ContainerDied","Data":"c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626"} Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.723408 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-zg5l4" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.723425 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2429ebb3759ee4698c0449b1cd3896b170e7c30711714b8cc938ee2f8bcf626" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.970921 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:55 crc kubenswrapper[4953]: E1011 03:02:55.971226 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93572d4d-c90c-484e-a428-47bd0925abba" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971239 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="93572d4d-c90c-484e-a428-47bd0925abba" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: E1011 03:02:55.971253 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ba53d5-3bbc-41d3-98d8-6df81e07dc12" containerName="keystone-db-sync" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971259 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ba53d5-3bbc-41d3-98d8-6df81e07dc12" containerName="keystone-db-sync" Oct 11 03:02:55 crc kubenswrapper[4953]: E1011 03:02:55.971276 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49cfe7db-879e-49c5-a881-df7508c56004" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971282 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="49cfe7db-879e-49c5-a881-df7508c56004" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: E1011 03:02:55.971293 4953 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a9d3c338-2efa-430c-9788-282e15a37bfc" containerName="ovn-config" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971298 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d3c338-2efa-430c-9788-282e15a37bfc" containerName="ovn-config" Oct 11 03:02:55 crc kubenswrapper[4953]: E1011 03:02:55.971313 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b7bc820-b928-47b5-83b3-3bca09771317" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971319 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b7bc820-b928-47b5-83b3-3bca09771317" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971458 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="49cfe7db-879e-49c5-a881-df7508c56004" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971474 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="93572d4d-c90c-484e-a428-47bd0925abba" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971483 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b7bc820-b928-47b5-83b3-3bca09771317" containerName="mariadb-database-create" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971493 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="17ba53d5-3bbc-41d3-98d8-6df81e07dc12" containerName="keystone-db-sync" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.971504 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d3c338-2efa-430c-9788-282e15a37bfc" containerName="ovn-config" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.972269 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:55 crc kubenswrapper[4953]: I1011 03:02:55.983351 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.026352 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-4nkqg"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.027384 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.032685 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.032761 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cz5lv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.032945 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.033092 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.034339 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4nkqg"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.055949 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.055997 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056031 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056052 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056069 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056090 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056108 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data\") pod 
\"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056123 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqz6n\" (UniqueName: \"kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056184 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2tm8\" (UniqueName: \"kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056202 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.056218 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157586 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157651 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157687 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157710 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157725 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: 
\"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157745 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157765 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqz6n\" (UniqueName: \"kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157782 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.157856 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2tm8\" (UniqueName: \"kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.158308 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.158328 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.158640 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.158841 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.158947 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc 
kubenswrapper[4953]: I1011 03:02:56.158971 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.160039 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.162890 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.163871 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.164087 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.165502 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.165557 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.165720 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.167220 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.168022 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.183631 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2tm8\" (UniqueName: \"kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8\") pod \"dnsmasq-dns-75bb4695fc-zqsrn\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.183689 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.192203 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqz6n\" (UniqueName: 
\"kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n\") pod \"keystone-bootstrap-4nkqg\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258833 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258873 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258900 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258915 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258962 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.258982 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.259007 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5q8f\" (UniqueName: \"kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.290931 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.338583 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359782 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359830 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359858 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5q8f\" (UniqueName: \"kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359924 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359941 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359961 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.359978 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.360367 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.360547 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.363903 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.364175 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.365275 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.369933 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.374383 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.379029 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.380376 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.393542 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.394423 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5q8f\" (UniqueName: \"kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f\") pod \"ceilometer-0\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.431995 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-2wgl5"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.433760 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.438142 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.438533 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-755v9" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.438673 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.461994 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2wgl5"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462290 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462337 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462367 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462404 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzw5k\" (UniqueName: \"kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462427 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462442 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d5v6\" (UniqueName: \"kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462501 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 
03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462562 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462587 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.462641 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.504157 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6d14-account-create-vdfm8"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.505837 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.507841 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.526664 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6d14-account-create-vdfm8"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.541119 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563566 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563634 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563669 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563713 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563731 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563753 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563774 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzw5k\" (UniqueName: \"kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563794 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d5v6\" (UniqueName: \"kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563810 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.563854 
4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.564268 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.564997 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.566743 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.567348 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.567380 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.571762 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.572146 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.572244 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.592301 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzw5k\" (UniqueName: \"kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k\") pod \"dnsmasq-dns-745b9ddc8c-dzp8h\" (UID: 
\"d0276760-d6d3-487c-bf61-561381a0f68b\") " pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.593819 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d5v6\" (UniqueName: \"kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6\") pod \"placement-db-sync-2wgl5\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.633762 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-19a9-account-create-cj7vv"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.634803 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.639122 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.654938 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-19a9-account-create-cj7vv"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.669744 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ghv5\" (UniqueName: \"kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5\") pod \"barbican-6d14-account-create-vdfm8\" (UID: \"e5af1453-5d36-4384-860e-9b53ee150124\") " pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.773484 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.784293 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpzsg\" (UniqueName: \"kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg\") pod \"cinder-19a9-account-create-cj7vv\" (UID: \"230c8b0d-9a31-45b6-8466-443ee8c0cfd4\") " pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.784476 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ghv5\" (UniqueName: \"kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5\") pod \"barbican-6d14-account-create-vdfm8\" (UID: \"e5af1453-5d36-4384-860e-9b53ee150124\") " pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.802064 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2wgl5" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.856675 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-eeb5-account-create-djwqq"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.859046 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.863033 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.874157 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ghv5\" (UniqueName: \"kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5\") pod \"barbican-6d14-account-create-vdfm8\" (UID: \"e5af1453-5d36-4384-860e-9b53ee150124\") " pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.885890 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpzsg\" (UniqueName: \"kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg\") pod \"cinder-19a9-account-create-cj7vv\" (UID: \"230c8b0d-9a31-45b6-8466-443ee8c0cfd4\") " pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.897995 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-eeb5-account-create-djwqq"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.906459 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpzsg\" (UniqueName: \"kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg\") pod \"cinder-19a9-account-create-cj7vv\" (UID: \"230c8b0d-9a31-45b6-8466-443ee8c0cfd4\") " pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.972888 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.978988 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:56 crc kubenswrapper[4953]: I1011 03:02:56.991857 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7f9l\" (UniqueName: \"kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l\") pod \"neutron-eeb5-account-create-djwqq\" (UID: \"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a\") " pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.096722 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7f9l\" (UniqueName: \"kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l\") pod \"neutron-eeb5-account-create-djwqq\" (UID: \"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a\") " pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.125671 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7f9l\" (UniqueName: \"kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l\") pod \"neutron-eeb5-account-create-djwqq\" (UID: \"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a\") " pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.136062 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.152219 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-4nkqg"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.188277 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.198261 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.286027 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.404105 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-19a9-account-create-cj7vv"] Oct 11 03:02:57 crc kubenswrapper[4953]: W1011 03:02:57.417655 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod230c8b0d_9a31_45b6_8466_443ee8c0cfd4.slice/crio-3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c WatchSource:0}: Error finding container 3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c: Status 404 returned error can't find the container with id 3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.454535 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-2wgl5"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.733761 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6d14-account-create-vdfm8"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.808072 4953 generic.go:334] "Generic (PLEG): container finished" podID="d0276760-d6d3-487c-bf61-561381a0f68b" containerID="53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690" exitCode=0 Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.851307 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" event={"ID":"d0276760-d6d3-487c-bf61-561381a0f68b","Type":"ContainerDied","Data":"53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.851350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" event={"ID":"d0276760-d6d3-487c-bf61-561381a0f68b","Type":"ContainerStarted","Data":"52afd50afcdf268ced1ae4165ace597776f4e7542a07372332f6b7ccc12eac00"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.851363 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-eeb5-account-create-djwqq"] Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.851378 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4nkqg" event={"ID":"06365e70-4753-4473-a07e-e35418124a5c","Type":"ContainerStarted","Data":"d0be1edac47fcdf9c60fce64ee411e7e027691a247156c9796c49933c73ca8c0"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.851400 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4nkqg" event={"ID":"06365e70-4753-4473-a07e-e35418124a5c","Type":"ContainerStarted","Data":"1f7c39115678b9992b733e2306c4643fc5f40f206b0e4fd268bdb99964cef33d"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.858502 4953 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/placement-db-sync-2wgl5" event={"ID":"421c9157-2796-4c8a-84db-ae0a142d1155","Type":"ContainerStarted","Data":"f5842fc0549a7d36f5ad1b6f359aca588d49fa53668253587b274953d79eae96"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.869477 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-4nkqg" podStartSLOduration=2.869454744 podStartE2EDuration="2.869454744s" podCreationTimestamp="2025-10-11 03:02:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:02:57.859011467 +0000 UTC m=+988.792099111" watchObservedRunningTime="2025-10-11 03:02:57.869454744 +0000 UTC m=+988.802542388" Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.878631 4953 generic.go:334] "Generic (PLEG): container finished" podID="a229e917-6997-4fc1-8bc8-178f74594670" containerID="c09a20b7c5bfe0218bed4825aa17eb4fb81a96c92046d55ec4308614ca18b85f" exitCode=0 Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.878677 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" event={"ID":"a229e917-6997-4fc1-8bc8-178f74594670","Type":"ContainerDied","Data":"c09a20b7c5bfe0218bed4825aa17eb4fb81a96c92046d55ec4308614ca18b85f"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.878717 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" event={"ID":"a229e917-6997-4fc1-8bc8-178f74594670","Type":"ContainerStarted","Data":"128f81130f5cde56b247da6e10ba120e1ca3e082897c6e68c3ef784dfbf62fa9"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.898392 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-19a9-account-create-cj7vv" event={"ID":"230c8b0d-9a31-45b6-8466-443ee8c0cfd4","Type":"ContainerStarted","Data":"02a904a45945aca7a7118b1402803b912d5a38c67fce97f29b1cec28abd86e31"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.898435 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-19a9-account-create-cj7vv" event={"ID":"230c8b0d-9a31-45b6-8466-443ee8c0cfd4","Type":"ContainerStarted","Data":"3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.911676 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerStarted","Data":"fa342d7c42dba58e2848e0f31a211085932e11b925cb15a9958fac7a4358ab34"} Oct 11 03:02:57 crc kubenswrapper[4953]: I1011 03:02:57.937627 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.415003 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.524068 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2tm8\" (UniqueName: \"kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8\") pod \"a229e917-6997-4fc1-8bc8-178f74594670\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.524133 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc\") pod \"a229e917-6997-4fc1-8bc8-178f74594670\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.524197 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb\") pod \"a229e917-6997-4fc1-8bc8-178f74594670\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.524265 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config\") pod \"a229e917-6997-4fc1-8bc8-178f74594670\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.524328 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb\") pod \"a229e917-6997-4fc1-8bc8-178f74594670\" (UID: \"a229e917-6997-4fc1-8bc8-178f74594670\") " Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.530810 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8" (OuterVolumeSpecName: "kube-api-access-d2tm8") pod "a229e917-6997-4fc1-8bc8-178f74594670" (UID: "a229e917-6997-4fc1-8bc8-178f74594670"). InnerVolumeSpecName "kube-api-access-d2tm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.554965 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a229e917-6997-4fc1-8bc8-178f74594670" (UID: "a229e917-6997-4fc1-8bc8-178f74594670"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.556115 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config" (OuterVolumeSpecName: "config") pod "a229e917-6997-4fc1-8bc8-178f74594670" (UID: "a229e917-6997-4fc1-8bc8-178f74594670"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.558353 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a229e917-6997-4fc1-8bc8-178f74594670" (UID: "a229e917-6997-4fc1-8bc8-178f74594670"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.559916 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a229e917-6997-4fc1-8bc8-178f74594670" (UID: "a229e917-6997-4fc1-8bc8-178f74594670"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.626651 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.626686 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.626699 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.626708 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2tm8\" (UniqueName: \"kubernetes.io/projected/a229e917-6997-4fc1-8bc8-178f74594670-kube-api-access-d2tm8\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.626718 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a229e917-6997-4fc1-8bc8-178f74594670-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.921000 4953 generic.go:334] "Generic (PLEG): container finished" podID="230c8b0d-9a31-45b6-8466-443ee8c0cfd4" containerID="02a904a45945aca7a7118b1402803b912d5a38c67fce97f29b1cec28abd86e31" exitCode=0 Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.921060 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-19a9-account-create-cj7vv" event={"ID":"230c8b0d-9a31-45b6-8466-443ee8c0cfd4","Type":"ContainerDied","Data":"02a904a45945aca7a7118b1402803b912d5a38c67fce97f29b1cec28abd86e31"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.923299 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" event={"ID":"d0276760-d6d3-487c-bf61-561381a0f68b","Type":"ContainerStarted","Data":"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.923453 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.924480 4953 generic.go:334] "Generic (PLEG): container finished" podID="e5af1453-5d36-4384-860e-9b53ee150124" containerID="8b2e24d5954aee4beaa306dd3ea57854286c6c0168c083729183a15f9d79418d" exitCode=0 Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.924663 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6d14-account-create-vdfm8" event={"ID":"e5af1453-5d36-4384-860e-9b53ee150124","Type":"ContainerDied","Data":"8b2e24d5954aee4beaa306dd3ea57854286c6c0168c083729183a15f9d79418d"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.924691 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/barbican-6d14-account-create-vdfm8" event={"ID":"e5af1453-5d36-4384-860e-9b53ee150124","Type":"ContainerStarted","Data":"749cb04eb15c0ea6672d17271adcf94a313e5d7c8606fe3aa08effa33efb7009"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.926033 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pkkj5" event={"ID":"485ebf6f-d6da-48bf-9d6a-cb353d9082f5","Type":"ContainerStarted","Data":"27eaa366ade9b3131fe3659b6c21d0deba1eea6513172aaa569b58b77ba2c2ee"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.936187 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.936191 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bb4695fc-zqsrn" event={"ID":"a229e917-6997-4fc1-8bc8-178f74594670","Type":"ContainerDied","Data":"128f81130f5cde56b247da6e10ba120e1ca3e082897c6e68c3ef784dfbf62fa9"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.936326 4953 scope.go:117] "RemoveContainer" containerID="c09a20b7c5bfe0218bed4825aa17eb4fb81a96c92046d55ec4308614ca18b85f" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.943143 4953 generic.go:334] "Generic (PLEG): container finished" podID="5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" containerID="3e8d13299e7f80d9203170c5a98bd3fe5d9f86a1cde1a5fabe6df12d86995aec" exitCode=0 Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.944499 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-eeb5-account-create-djwqq" event={"ID":"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a","Type":"ContainerDied","Data":"3e8d13299e7f80d9203170c5a98bd3fe5d9f86a1cde1a5fabe6df12d86995aec"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.944588 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-eeb5-account-create-djwqq" event={"ID":"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a","Type":"ContainerStarted","Data":"63aa4b493f43a541a5bb585bece3e388b465a55b5eeade57cdf90d1266ecb4da"} Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.951387 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" podStartSLOduration=2.951364366 podStartE2EDuration="2.951364366s" podCreationTimestamp="2025-10-11 03:02:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:02:58.94604996 +0000 UTC m=+989.879137624" watchObservedRunningTime="2025-10-11 03:02:58.951364366 +0000 UTC m=+989.884452010" Oct 11 03:02:58 crc kubenswrapper[4953]: I1011 03:02:58.982031 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-pkkj5" podStartSLOduration=3.216671008 podStartE2EDuration="30.982006498s" podCreationTimestamp="2025-10-11 03:02:28 +0000 UTC" firstStartedPulling="2025-10-11 03:02:29.775395081 +0000 UTC m=+960.708482725" lastFinishedPulling="2025-10-11 03:02:57.540730571 +0000 UTC m=+988.473818215" observedRunningTime="2025-10-11 03:02:58.963347082 +0000 UTC m=+989.896434726" watchObservedRunningTime="2025-10-11 03:02:58.982006498 +0000 UTC m=+989.915094142" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.080109 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.087351 4953 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/dnsmasq-dns-75bb4695fc-zqsrn"] Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.357957 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.444089 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpzsg\" (UniqueName: \"kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg\") pod \"230c8b0d-9a31-45b6-8466-443ee8c0cfd4\" (UID: \"230c8b0d-9a31-45b6-8466-443ee8c0cfd4\") " Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.471955 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg" (OuterVolumeSpecName: "kube-api-access-hpzsg") pod "230c8b0d-9a31-45b6-8466-443ee8c0cfd4" (UID: "230c8b0d-9a31-45b6-8466-443ee8c0cfd4"). InnerVolumeSpecName "kube-api-access-hpzsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.546706 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpzsg\" (UniqueName: \"kubernetes.io/projected/230c8b0d-9a31-45b6-8466-443ee8c0cfd4-kube-api-access-hpzsg\") on node \"crc\" DevicePath \"\"" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.805122 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a229e917-6997-4fc1-8bc8-178f74594670" path="/var/lib/kubelet/pods/a229e917-6997-4fc1-8bc8-178f74594670/volumes" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.956885 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-19a9-account-create-cj7vv" event={"ID":"230c8b0d-9a31-45b6-8466-443ee8c0cfd4","Type":"ContainerDied","Data":"3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c"} Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.956937 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e833a11b260969326bac167c4ea2e44c87de2db86bf5e947f1f14ce56e6981c" Oct 11 03:02:59 crc kubenswrapper[4953]: I1011 03:02:59.956952 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-19a9-account-create-cj7vv" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.873352 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5xv4s"] Oct 11 03:03:01 crc kubenswrapper[4953]: E1011 03:03:01.874093 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a229e917-6997-4fc1-8bc8-178f74594670" containerName="init" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.874110 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a229e917-6997-4fc1-8bc8-178f74594670" containerName="init" Oct 11 03:03:01 crc kubenswrapper[4953]: E1011 03:03:01.874136 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="230c8b0d-9a31-45b6-8466-443ee8c0cfd4" containerName="mariadb-account-create" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.874143 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="230c8b0d-9a31-45b6-8466-443ee8c0cfd4" containerName="mariadb-account-create" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.874288 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a229e917-6997-4fc1-8bc8-178f74594670" containerName="init" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.874315 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="230c8b0d-9a31-45b6-8466-443ee8c0cfd4" containerName="mariadb-account-create" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.874858 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.876827 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.877015 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-65stp" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.877034 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.885351 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5xv4s"] Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.979376 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-eeb5-account-create-djwqq" event={"ID":"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a","Type":"ContainerDied","Data":"63aa4b493f43a541a5bb585bece3e388b465a55b5eeade57cdf90d1266ecb4da"} Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.979423 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63aa4b493f43a541a5bb585bece3e388b465a55b5eeade57cdf90d1266ecb4da" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992116 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992155 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " 
pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992182 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992206 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992236 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:01 crc kubenswrapper[4953]: I1011 03:03:01.992252 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m64zw\" (UniqueName: \"kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.047053 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.056313 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096330 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096377 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m64zw\" (UniqueName: \"kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096817 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096862 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096905 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.096998 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.097641 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.106768 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.108201 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.109074 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" 
(UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.109714 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.120284 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m64zw\" (UniqueName: \"kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw\") pod \"cinder-db-sync-5xv4s\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.192231 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.198614 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ghv5\" (UniqueName: \"kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5\") pod \"e5af1453-5d36-4384-860e-9b53ee150124\" (UID: \"e5af1453-5d36-4384-860e-9b53ee150124\") " Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.198862 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7f9l\" (UniqueName: \"kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l\") pod \"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a\" (UID: \"5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a\") " Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.202654 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l" (OuterVolumeSpecName: "kube-api-access-v7f9l") pod "5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" (UID: "5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a"). InnerVolumeSpecName "kube-api-access-v7f9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.203246 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5" (OuterVolumeSpecName: "kube-api-access-9ghv5") pod "e5af1453-5d36-4384-860e-9b53ee150124" (UID: "e5af1453-5d36-4384-860e-9b53ee150124"). InnerVolumeSpecName "kube-api-access-9ghv5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.300418 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7f9l\" (UniqueName: \"kubernetes.io/projected/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a-kube-api-access-v7f9l\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.300460 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ghv5\" (UniqueName: \"kubernetes.io/projected/e5af1453-5d36-4384-860e-9b53ee150124-kube-api-access-9ghv5\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.995149 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6d14-account-create-vdfm8" event={"ID":"e5af1453-5d36-4384-860e-9b53ee150124","Type":"ContainerDied","Data":"749cb04eb15c0ea6672d17271adcf94a313e5d7c8606fe3aa08effa33efb7009"} Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.995193 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="749cb04eb15c0ea6672d17271adcf94a313e5d7c8606fe3aa08effa33efb7009" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.995205 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6d14-account-create-vdfm8" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.998718 4953 generic.go:334] "Generic (PLEG): container finished" podID="06365e70-4753-4473-a07e-e35418124a5c" containerID="d0be1edac47fcdf9c60fce64ee411e7e027691a247156c9796c49933c73ca8c0" exitCode=0 Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.998796 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-eeb5-account-create-djwqq" Oct 11 03:03:02 crc kubenswrapper[4953]: I1011 03:03:02.999811 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4nkqg" event={"ID":"06365e70-4753-4473-a07e-e35418124a5c","Type":"ContainerDied","Data":"d0be1edac47fcdf9c60fce64ee411e7e027691a247156c9796c49933c73ca8c0"} Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.775868 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.851459 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.852266 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="dnsmasq-dns" containerID="cri-o://87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119" gracePeriod=10 Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.913747 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-txcgm"] Oct 11 03:03:06 crc kubenswrapper[4953]: E1011 03:03:06.914146 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.914163 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: E1011 03:03:06.914176 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5af1453-5d36-4384-860e-9b53ee150124" 
containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.914183 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5af1453-5d36-4384-860e-9b53ee150124" containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.914344 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5af1453-5d36-4384-860e-9b53ee150124" containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.914375 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" containerName="mariadb-account-create" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.914956 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.920116 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.920584 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rsld9" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.938455 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-txcgm"] Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.988527 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.988692 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96h8d\" (UniqueName: \"kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:06 crc kubenswrapper[4953]: I1011 03:03:06.988719 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.089666 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.089739 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96h8d\" (UniqueName: \"kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.089760 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.096351 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.100197 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.109338 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96h8d\" (UniqueName: \"kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d\") pod \"barbican-db-sync-txcgm\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.212287 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-n95ss"] Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.213306 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.215403 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.215447 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.216781 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qxljb" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.226869 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-n95ss"] Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.246010 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.293497 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdtf9\" (UniqueName: \"kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.293612 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.293957 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.396155 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.397006 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdtf9\" (UniqueName: \"kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.397080 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.403126 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.404230 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.418379 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdtf9\" (UniqueName: \"kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9\") pod \"neutron-db-sync-n95ss\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.536179 4953 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.558262 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.573100 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708259 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb\") pod \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708677 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts\") pod \"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708738 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc\") pod \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708760 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys\") pod \"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708817 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys\") pod \"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708832 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle\") pod \"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708870 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config\") pod \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708909 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-nb\") pod \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708930 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqz6n\" (UniqueName: \"kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n\") pod 
\"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708953 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data\") pod \"06365e70-4753-4473-a07e-e35418124a5c\" (UID: \"06365e70-4753-4473-a07e-e35418124a5c\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.708983 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn6sk\" (UniqueName: \"kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk\") pod \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\" (UID: \"e256eb2e-b0cc-4860-bf46-558c5ddbb512\") " Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.716777 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts" (OuterVolumeSpecName: "scripts") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.717210 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk" (OuterVolumeSpecName: "kube-api-access-xn6sk") pod "e256eb2e-b0cc-4860-bf46-558c5ddbb512" (UID: "e256eb2e-b0cc-4860-bf46-558c5ddbb512"). InnerVolumeSpecName "kube-api-access-xn6sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.719404 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.721089 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.723047 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n" (OuterVolumeSpecName: "kube-api-access-wqz6n") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "kube-api-access-wqz6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.741580 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.744074 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data" (OuterVolumeSpecName: "config-data") pod "06365e70-4753-4473-a07e-e35418124a5c" (UID: "06365e70-4753-4473-a07e-e35418124a5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.760897 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config" (OuterVolumeSpecName: "config") pod "e256eb2e-b0cc-4860-bf46-558c5ddbb512" (UID: "e256eb2e-b0cc-4860-bf46-558c5ddbb512"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.762281 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e256eb2e-b0cc-4860-bf46-558c5ddbb512" (UID: "e256eb2e-b0cc-4860-bf46-558c5ddbb512"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.769248 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e256eb2e-b0cc-4860-bf46-558c5ddbb512" (UID: "e256eb2e-b0cc-4860-bf46-558c5ddbb512"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.771229 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e256eb2e-b0cc-4860-bf46-558c5ddbb512" (UID: "e256eb2e-b0cc-4860-bf46-558c5ddbb512"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.778857 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5xv4s"] Oct 11 03:03:07 crc kubenswrapper[4953]: W1011 03:03:07.811321 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37b17e82_1098_40f6_9a8b_ff2e863e5559.slice/crio-581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66 WatchSource:0}: Error finding container 581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66: Status 404 returned error can't find the container with id 581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66 Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811481 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811655 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811666 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811676 4953 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811686 4953 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811694 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811709 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811717 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e256eb2e-b0cc-4860-bf46-558c5ddbb512-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811727 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqz6n\" (UniqueName: \"kubernetes.io/projected/06365e70-4753-4473-a07e-e35418124a5c-kube-api-access-wqz6n\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811737 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06365e70-4753-4473-a07e-e35418124a5c-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.811747 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn6sk\" (UniqueName: 
\"kubernetes.io/projected/e256eb2e-b0cc-4860-bf46-558c5ddbb512-kube-api-access-xn6sk\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:07 crc kubenswrapper[4953]: I1011 03:03:07.901062 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-txcgm"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.043774 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerStarted","Data":"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.045434 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5xv4s" event={"ID":"37b17e82-1098-40f6-9a8b-ff2e863e5559","Type":"ContainerStarted","Data":"581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.047113 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-4nkqg" event={"ID":"06365e70-4753-4473-a07e-e35418124a5c","Type":"ContainerDied","Data":"1f7c39115678b9992b733e2306c4643fc5f40f206b0e4fd268bdb99964cef33d"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.047146 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f7c39115678b9992b733e2306c4643fc5f40f206b0e4fd268bdb99964cef33d" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.047215 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-4nkqg" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.051242 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2wgl5" event={"ID":"421c9157-2796-4c8a-84db-ae0a142d1155","Type":"ContainerStarted","Data":"1dc7ad40b3d4ae37563976f780b83bae6c09e17f3358a35dd38f71a71a23e234"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.052553 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-txcgm" event={"ID":"a800d96f-d6e6-4698-8da8-a27dab6454b7","Type":"ContainerStarted","Data":"1d7591e2c33bc4768905d4af6fb49d6b5fb58a78e354c50e92217387330d0add"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.054317 4953 generic.go:334] "Generic (PLEG): container finished" podID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerID="87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119" exitCode=0 Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.054362 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" event={"ID":"e256eb2e-b0cc-4860-bf46-558c5ddbb512","Type":"ContainerDied","Data":"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.054389 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" event={"ID":"e256eb2e-b0cc-4860-bf46-558c5ddbb512","Type":"ContainerDied","Data":"95b8214a6956434d8fdab1364231757dc52d885f429f60d53259b2231f057a75"} Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.054410 4953 scope.go:117] "RemoveContainer" containerID="87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.054544 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-z6xn5" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.070756 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-2wgl5" podStartSLOduration=2.2906449269999998 podStartE2EDuration="12.070738106s" podCreationTimestamp="2025-10-11 03:02:56 +0000 UTC" firstStartedPulling="2025-10-11 03:02:57.535792905 +0000 UTC m=+988.468880549" lastFinishedPulling="2025-10-11 03:03:07.315886084 +0000 UTC m=+998.248973728" observedRunningTime="2025-10-11 03:03:08.067170475 +0000 UTC m=+999.000258119" watchObservedRunningTime="2025-10-11 03:03:08.070738106 +0000 UTC m=+999.003825750" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.078446 4953 scope.go:117] "RemoveContainer" containerID="3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd" Oct 11 03:03:08 crc kubenswrapper[4953]: W1011 03:03:08.095802 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod244dbbcd_798d_4069_8f37_83a0391a98d6.slice/crio-999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329 WatchSource:0}: Error finding container 999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329: Status 404 returned error can't find the container with id 999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329 Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.106771 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-n95ss"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.106882 4953 scope.go:117] "RemoveContainer" containerID="87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119" Oct 11 03:03:08 crc kubenswrapper[4953]: E1011 03:03:08.108785 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119\": container with ID starting with 87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119 not found: ID does not exist" containerID="87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.108829 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119"} err="failed to get container status \"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119\": rpc error: code = NotFound desc = could not find container \"87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119\": container with ID starting with 87795b327f17ad9687ff4cc48d190680e51c06dcfa4d2b4a74e5299954ddf119 not found: ID does not exist" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.108858 4953 scope.go:117] "RemoveContainer" containerID="3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd" Oct 11 03:03:08 crc kubenswrapper[4953]: E1011 03:03:08.112749 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd\": container with ID starting with 3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd not found: ID does not exist" containerID="3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.112777 4953 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd"} err="failed to get container status \"3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd\": rpc error: code = NotFound desc = could not find container \"3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd\": container with ID starting with 3480829e957b095e2232973362bc93329025c91c6edad20d9fd0b4d99b1ae7fd not found: ID does not exist" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.116652 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.137125 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-z6xn5"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.624666 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-4nkqg"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.630804 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-4nkqg"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.716715 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-zkswt"] Oct 11 03:03:08 crc kubenswrapper[4953]: E1011 03:03:08.717257 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06365e70-4753-4473-a07e-e35418124a5c" containerName="keystone-bootstrap" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.717274 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="06365e70-4753-4473-a07e-e35418124a5c" containerName="keystone-bootstrap" Oct 11 03:03:08 crc kubenswrapper[4953]: E1011 03:03:08.717292 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="dnsmasq-dns" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.717298 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="dnsmasq-dns" Oct 11 03:03:08 crc kubenswrapper[4953]: E1011 03:03:08.717307 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="init" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.717314 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="init" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.717515 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="06365e70-4753-4473-a07e-e35418124a5c" containerName="keystone-bootstrap" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.717528 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" containerName="dnsmasq-dns" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.718520 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.720555 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.720913 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cz5lv" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.721159 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.721978 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.732304 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zkswt"] Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843042 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843092 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843111 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843147 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mzrq\" (UniqueName: \"kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843182 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.843198 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.945702 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys\") pod \"keystone-bootstrap-zkswt\" 
(UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.945857 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.945898 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.945983 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mzrq\" (UniqueName: \"kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.946078 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.946113 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.955149 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.956475 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.957043 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.957466 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.960828 4953 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:08 crc kubenswrapper[4953]: I1011 03:03:08.971682 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mzrq\" (UniqueName: \"kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq\") pod \"keystone-bootstrap-zkswt\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.034968 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.073014 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n95ss" event={"ID":"244dbbcd-798d-4069-8f37-83a0391a98d6","Type":"ContainerStarted","Data":"1cba574eb5163399c111c61c5e2ec974e04c14a6e9ed020aca795bfaa6ca676c"} Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.073074 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n95ss" event={"ID":"244dbbcd-798d-4069-8f37-83a0391a98d6","Type":"ContainerStarted","Data":"999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329"} Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.095881 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-n95ss" podStartSLOduration=2.095864029 podStartE2EDuration="2.095864029s" podCreationTimestamp="2025-10-11 03:03:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:09.091124668 +0000 UTC m=+1000.024212352" watchObservedRunningTime="2025-10-11 03:03:09.095864029 +0000 UTC m=+1000.028951673" Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.485344 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zkswt"] Oct 11 03:03:09 crc kubenswrapper[4953]: W1011 03:03:09.501483 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84b8eec5_36ec_46dd_a308_f9d2103cb125.slice/crio-f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69 WatchSource:0}: Error finding container f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69: Status 404 returned error can't find the container with id f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69 Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.809331 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06365e70-4753-4473-a07e-e35418124a5c" path="/var/lib/kubelet/pods/06365e70-4753-4473-a07e-e35418124a5c/volumes" Oct 11 03:03:09 crc kubenswrapper[4953]: I1011 03:03:09.810148 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e256eb2e-b0cc-4860-bf46-558c5ddbb512" path="/var/lib/kubelet/pods/e256eb2e-b0cc-4860-bf46-558c5ddbb512/volumes" Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.087763 4953 generic.go:334] "Generic (PLEG): container finished" podID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" containerID="27eaa366ade9b3131fe3659b6c21d0deba1eea6513172aaa569b58b77ba2c2ee" exitCode=0 Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.087847 4953 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pkkj5" event={"ID":"485ebf6f-d6da-48bf-9d6a-cb353d9082f5","Type":"ContainerDied","Data":"27eaa366ade9b3131fe3659b6c21d0deba1eea6513172aaa569b58b77ba2c2ee"} Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.090440 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerStarted","Data":"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c"} Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.115301 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zkswt" event={"ID":"84b8eec5-36ec-46dd-a308-f9d2103cb125","Type":"ContainerStarted","Data":"c07062dbee10f578669ce815d4910a8c77159505daef11ca4104593195fab831"} Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.115351 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zkswt" event={"ID":"84b8eec5-36ec-46dd-a308-f9d2103cb125","Type":"ContainerStarted","Data":"f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69"} Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.118852 4953 generic.go:334] "Generic (PLEG): container finished" podID="421c9157-2796-4c8a-84db-ae0a142d1155" containerID="1dc7ad40b3d4ae37563976f780b83bae6c09e17f3358a35dd38f71a71a23e234" exitCode=0 Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.118933 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2wgl5" event={"ID":"421c9157-2796-4c8a-84db-ae0a142d1155","Type":"ContainerDied","Data":"1dc7ad40b3d4ae37563976f780b83bae6c09e17f3358a35dd38f71a71a23e234"} Oct 11 03:03:10 crc kubenswrapper[4953]: I1011 03:03:10.133931 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-zkswt" podStartSLOduration=2.133913292 podStartE2EDuration="2.133913292s" podCreationTimestamp="2025-10-11 03:03:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:10.133381018 +0000 UTC m=+1001.066468672" watchObservedRunningTime="2025-10-11 03:03:10.133913292 +0000 UTC m=+1001.067000936" Oct 11 03:03:14 crc kubenswrapper[4953]: I1011 03:03:14.149801 4953 generic.go:334] "Generic (PLEG): container finished" podID="84b8eec5-36ec-46dd-a308-f9d2103cb125" containerID="c07062dbee10f578669ce815d4910a8c77159505daef11ca4104593195fab831" exitCode=0 Oct 11 03:03:14 crc kubenswrapper[4953]: I1011 03:03:14.149927 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zkswt" event={"ID":"84b8eec5-36ec-46dd-a308-f9d2103cb125","Type":"ContainerDied","Data":"c07062dbee10f578669ce815d4910a8c77159505daef11ca4104593195fab831"} Oct 11 03:03:15 crc kubenswrapper[4953]: I1011 03:03:15.816233 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:15 crc kubenswrapper[4953]: I1011 03:03:15.828606 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-2wgl5" Oct 11 03:03:15 crc kubenswrapper[4953]: I1011 03:03:15.889530 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-pkkj5" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.007999 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data\") pod \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008034 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mzrq\" (UniqueName: \"kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008079 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008131 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008175 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle\") pod \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008199 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts\") pod \"421c9157-2796-4c8a-84db-ae0a142d1155\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008243 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008290 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs\") pod \"421c9157-2796-4c8a-84db-ae0a142d1155\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008309 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d5v6\" (UniqueName: \"kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6\") pod \"421c9157-2796-4c8a-84db-ae0a142d1155\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008330 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-426ss\" (UniqueName: \"kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss\") pod \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\" (UID: 
\"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008384 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008412 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle\") pod \"421c9157-2796-4c8a-84db-ae0a142d1155\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008451 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts\") pod \"84b8eec5-36ec-46dd-a308-f9d2103cb125\" (UID: \"84b8eec5-36ec-46dd-a308-f9d2103cb125\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008479 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data\") pod \"421c9157-2796-4c8a-84db-ae0a142d1155\" (UID: \"421c9157-2796-4c8a-84db-ae0a142d1155\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.008516 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data\") pod \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\" (UID: \"485ebf6f-d6da-48bf-9d6a-cb353d9082f5\") " Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.010912 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs" (OuterVolumeSpecName: "logs") pod "421c9157-2796-4c8a-84db-ae0a142d1155" (UID: "421c9157-2796-4c8a-84db-ae0a142d1155"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.014798 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "485ebf6f-d6da-48bf-9d6a-cb353d9082f5" (UID: "485ebf6f-d6da-48bf-9d6a-cb353d9082f5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.015875 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts" (OuterVolumeSpecName: "scripts") pod "421c9157-2796-4c8a-84db-ae0a142d1155" (UID: "421c9157-2796-4c8a-84db-ae0a142d1155"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.015882 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.015894 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss" (OuterVolumeSpecName: "kube-api-access-426ss") pod "485ebf6f-d6da-48bf-9d6a-cb353d9082f5" (UID: "485ebf6f-d6da-48bf-9d6a-cb353d9082f5"). InnerVolumeSpecName "kube-api-access-426ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.024051 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts" (OuterVolumeSpecName: "scripts") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.024815 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq" (OuterVolumeSpecName: "kube-api-access-9mzrq") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "kube-api-access-9mzrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.031800 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.038866 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "421c9157-2796-4c8a-84db-ae0a142d1155" (UID: "421c9157-2796-4c8a-84db-ae0a142d1155"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.042758 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "485ebf6f-d6da-48bf-9d6a-cb353d9082f5" (UID: "485ebf6f-d6da-48bf-9d6a-cb353d9082f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.049848 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.052449 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6" (OuterVolumeSpecName: "kube-api-access-4d5v6") pod "421c9157-2796-4c8a-84db-ae0a142d1155" (UID: "421c9157-2796-4c8a-84db-ae0a142d1155"). 
InnerVolumeSpecName "kube-api-access-4d5v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.067034 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data" (OuterVolumeSpecName: "config-data") pod "421c9157-2796-4c8a-84db-ae0a142d1155" (UID: "421c9157-2796-4c8a-84db-ae0a142d1155"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.068859 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data" (OuterVolumeSpecName: "config-data") pod "84b8eec5-36ec-46dd-a308-f9d2103cb125" (UID: "84b8eec5-36ec-46dd-a308-f9d2103cb125"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.070840 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data" (OuterVolumeSpecName: "config-data") pod "485ebf6f-d6da-48bf-9d6a-cb353d9082f5" (UID: "485ebf6f-d6da-48bf-9d6a-cb353d9082f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110721 4953 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110752 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110773 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mzrq\" (UniqueName: \"kubernetes.io/projected/84b8eec5-36ec-46dd-a308-f9d2103cb125-kube-api-access-9mzrq\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110785 4953 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110794 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110802 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110810 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110819 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc 
kubenswrapper[4953]: I1011 03:03:16.110829 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/421c9157-2796-4c8a-84db-ae0a142d1155-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110837 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d5v6\" (UniqueName: \"kubernetes.io/projected/421c9157-2796-4c8a-84db-ae0a142d1155-kube-api-access-4d5v6\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110846 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-426ss\" (UniqueName: \"kubernetes.io/projected/485ebf6f-d6da-48bf-9d6a-cb353d9082f5-kube-api-access-426ss\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110854 4953 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110862 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110870 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84b8eec5-36ec-46dd-a308-f9d2103cb125-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.110878 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/421c9157-2796-4c8a-84db-ae0a142d1155-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.169323 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zkswt" event={"ID":"84b8eec5-36ec-46dd-a308-f9d2103cb125","Type":"ContainerDied","Data":"f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69"} Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.169382 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8ac345fd3fafd8cb098a15cb18a569e2dd4ab39294f57802bcdb0bd8031da69" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.169346 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zkswt" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.171972 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-2wgl5" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.171974 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-2wgl5" event={"ID":"421c9157-2796-4c8a-84db-ae0a142d1155","Type":"ContainerDied","Data":"f5842fc0549a7d36f5ad1b6f359aca588d49fa53668253587b274953d79eae96"} Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.172099 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5842fc0549a7d36f5ad1b6f359aca588d49fa53668253587b274953d79eae96" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.174727 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pkkj5" event={"ID":"485ebf6f-d6da-48bf-9d6a-cb353d9082f5","Type":"ContainerDied","Data":"8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573"} Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.174758 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8964670570d999e4d451afe4ba7949a17096865fac2090f232712a113ed53573" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.174779 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-pkkj5" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.259588 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6754fbff86-gtfd6"] Oct 11 03:03:16 crc kubenswrapper[4953]: E1011 03:03:16.260149 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="421c9157-2796-4c8a-84db-ae0a142d1155" containerName="placement-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260174 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="421c9157-2796-4c8a-84db-ae0a142d1155" containerName="placement-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: E1011 03:03:16.260188 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" containerName="glance-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260197 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" containerName="glance-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: E1011 03:03:16.260243 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84b8eec5-36ec-46dd-a308-f9d2103cb125" containerName="keystone-bootstrap" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260253 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="84b8eec5-36ec-46dd-a308-f9d2103cb125" containerName="keystone-bootstrap" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260418 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="84b8eec5-36ec-46dd-a308-f9d2103cb125" containerName="keystone-bootstrap" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260436 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" containerName="glance-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.260447 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="421c9157-2796-4c8a-84db-ae0a142d1155" containerName="placement-db-sync" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.261292 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.265196 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.265433 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.265576 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cz5lv" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.265744 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.266222 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.266684 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.270950 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6754fbff86-gtfd6"] Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.316960 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8hr5\" (UniqueName: \"kubernetes.io/projected/33a658a1-a813-4fca-bfec-0ed6aef2e124-kube-api-access-j8hr5\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317004 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-fernet-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317030 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-credential-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317054 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-internal-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317087 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-scripts\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317125 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-combined-ca-bundle\") pod \"keystone-6754fbff86-gtfd6\" (UID: 
\"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317262 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-config-data\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.317401 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-public-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418766 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-config-data\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418821 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-public-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418855 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8hr5\" (UniqueName: \"kubernetes.io/projected/33a658a1-a813-4fca-bfec-0ed6aef2e124-kube-api-access-j8hr5\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418876 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-fernet-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418908 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-credential-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418939 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-internal-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.418976 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-scripts\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 
03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.419004 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-combined-ca-bundle\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.456306 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-combined-ca-bundle\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.456424 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-credential-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.456973 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-scripts\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.457174 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-public-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.457972 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8hr5\" (UniqueName: \"kubernetes.io/projected/33a658a1-a813-4fca-bfec-0ed6aef2e124-kube-api-access-j8hr5\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.460421 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-config-data\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.462831 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-fernet-keys\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.464941 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33a658a1-a813-4fca-bfec-0ed6aef2e124-internal-tls-certs\") pod \"keystone-6754fbff86-gtfd6\" (UID: \"33a658a1-a813-4fca-bfec-0ed6aef2e124\") " pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.587108 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.933033 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-766db79b7b-s7l54"] Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.934904 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.942109 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.946315 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.946584 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.946798 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-755v9" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.946954 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 11 03:03:16 crc kubenswrapper[4953]: I1011 03:03:16.952798 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-766db79b7b-s7l54"] Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028607 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-config-data\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028695 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-public-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028823 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-combined-ca-bundle\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028851 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-internal-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028870 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqz7l\" (UniqueName: \"kubernetes.io/projected/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-kube-api-access-bqz7l\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028902 4953 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-scripts\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.028948 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-logs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.129911 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-internal-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.129950 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqz7l\" (UniqueName: \"kubernetes.io/projected/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-kube-api-access-bqz7l\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.129976 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-scripts\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.130013 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-logs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.130056 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-config-data\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.130077 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-public-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.130136 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-combined-ca-bundle\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.130881 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-logs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.147207 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-scripts\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.147513 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-public-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.147783 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-combined-ca-bundle\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.164303 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-internal-tls-certs\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.166919 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-config-data\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.169459 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqz7l\" (UniqueName: \"kubernetes.io/projected/d6fea745-5eaa-47d2-b039-7ef9e1efd8f5-kube-api-access-bqz7l\") pod \"placement-766db79b7b-s7l54\" (UID: \"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5\") " pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.257527 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.297537 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.299099 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.304243 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.434031 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.434086 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x92nw\" (UniqueName: \"kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.434130 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.434492 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.434670 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.536817 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.536883 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x92nw\" (UniqueName: \"kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.536931 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.536964 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.537008 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.538010 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.538639 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.539187 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.539758 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.564654 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x92nw\" (UniqueName: \"kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw\") pod \"dnsmasq-dns-7987f74bbc-dcplw\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:17 crc kubenswrapper[4953]: I1011 03:03:17.615755 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:28 crc kubenswrapper[4953]: E1011 03:03:28.379928 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 11 03:03:28 crc kubenswrapper[4953]: E1011 03:03:28.380909 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m64zw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-5xv4s_openstack(37b17e82-1098-40f6-9a8b-ff2e863e5559): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 11 03:03:28 crc kubenswrapper[4953]: E1011 03:03:28.382184 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-5xv4s" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:28.859188 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-766db79b7b-s7l54"] Oct 11 
03:03:29 crc kubenswrapper[4953]: W1011 03:03:28.868765 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6fea745_5eaa_47d2_b039_7ef9e1efd8f5.slice/crio-b8fb8ae850b62c91ed494cb6b5677d18e8d48bca65d5910a4ec0e95828b7585b WatchSource:0}: Error finding container b8fb8ae850b62c91ed494cb6b5677d18e8d48bca65d5910a4ec0e95828b7585b: Status 404 returned error can't find the container with id b8fb8ae850b62c91ed494cb6b5677d18e8d48bca65d5910a4ec0e95828b7585b Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:28.939450 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6754fbff86-gtfd6"] Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.038229 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.318469 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6754fbff86-gtfd6" event={"ID":"33a658a1-a813-4fca-bfec-0ed6aef2e124","Type":"ContainerStarted","Data":"e2a54d460d70844f5df05e79501b17d9520e4dcfa0f84faa04f4bfcf35f56181"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.318515 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6754fbff86-gtfd6" event={"ID":"33a658a1-a813-4fca-bfec-0ed6aef2e124","Type":"ContainerStarted","Data":"cca57bdc1ab11add9639253034df5ac5a77e8132d9da06a554d81e90240565c5"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.319783 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.329359 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerStarted","Data":"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.331885 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-766db79b7b-s7l54" event={"ID":"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5","Type":"ContainerStarted","Data":"c35c8b5fee5c6cd068e6c3d6d3fd8f158251d649a220227c42cb11acd735d315"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.331937 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-766db79b7b-s7l54" event={"ID":"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5","Type":"ContainerStarted","Data":"b8fb8ae850b62c91ed494cb6b5677d18e8d48bca65d5910a4ec0e95828b7585b"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.333394 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-txcgm" event={"ID":"a800d96f-d6e6-4698-8da8-a27dab6454b7","Type":"ContainerStarted","Data":"72f93a1505b2b1b33f76372870851c9f47d5c1d8fd28e42def6851caab7d88be"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.338349 4953 generic.go:334] "Generic (PLEG): container finished" podID="ed7285fa-8a49-4586-997c-e546a78ac436" containerID="4bdc404244e8c8339f5071768e3a229ae3c2d3317b71e7fc0e3327d0061724bd" exitCode=0 Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.338755 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" event={"ID":"ed7285fa-8a49-4586-997c-e546a78ac436","Type":"ContainerDied","Data":"4bdc404244e8c8339f5071768e3a229ae3c2d3317b71e7fc0e3327d0061724bd"} Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.338801 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" event={"ID":"ed7285fa-8a49-4586-997c-e546a78ac436","Type":"ContainerStarted","Data":"a319bfaebcb7822e02d25ab0aa08027332caec8e8df3491692e5ed3073cf2a94"} Oct 11 03:03:29 crc kubenswrapper[4953]: E1011 03:03:29.339879 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-5xv4s" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.388975 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6754fbff86-gtfd6" podStartSLOduration=13.388945901 podStartE2EDuration="13.388945901s" podCreationTimestamp="2025-10-11 03:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:29.353091156 +0000 UTC m=+1020.286178800" watchObservedRunningTime="2025-10-11 03:03:29.388945901 +0000 UTC m=+1020.322033545" Oct 11 03:03:29 crc kubenswrapper[4953]: I1011 03:03:29.430692 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-txcgm" podStartSLOduration=2.961207712 podStartE2EDuration="23.430661226s" podCreationTimestamp="2025-10-11 03:03:06 +0000 UTC" firstStartedPulling="2025-10-11 03:03:07.900943231 +0000 UTC m=+998.834030875" lastFinishedPulling="2025-10-11 03:03:28.370396745 +0000 UTC m=+1019.303484389" observedRunningTime="2025-10-11 03:03:29.422526138 +0000 UTC m=+1020.355613782" watchObservedRunningTime="2025-10-11 03:03:29.430661226 +0000 UTC m=+1020.363748870" Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.350171 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-766db79b7b-s7l54" event={"ID":"d6fea745-5eaa-47d2-b039-7ef9e1efd8f5","Type":"ContainerStarted","Data":"97e0636e9fb0977f67bfc5abe320722d4e0aeefeed70b09b129078a17bce7b4b"} Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.351754 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.351770 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.353697 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" event={"ID":"ed7285fa-8a49-4586-997c-e546a78ac436","Type":"ContainerStarted","Data":"3e668ce6ea362a40526cc9203aaac5b526d3ae2e0859f26e44648fcadc79cf5c"} Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.400447 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" podStartSLOduration=13.400432676 podStartE2EDuration="13.400432676s" podCreationTimestamp="2025-10-11 03:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:30.396714911 +0000 UTC m=+1021.329802565" watchObservedRunningTime="2025-10-11 03:03:30.400432676 +0000 UTC m=+1021.333520320" Oct 11 03:03:30 crc kubenswrapper[4953]: I1011 03:03:30.409633 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-766db79b7b-s7l54" 
podStartSLOduration=14.40961749 podStartE2EDuration="14.40961749s" podCreationTimestamp="2025-10-11 03:03:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:30.380293702 +0000 UTC m=+1021.313381366" watchObservedRunningTime="2025-10-11 03:03:30.40961749 +0000 UTC m=+1021.342705134" Oct 11 03:03:31 crc kubenswrapper[4953]: I1011 03:03:31.365783 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.414495 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerStarted","Data":"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2"} Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.415323 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.414788 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="sg-core" containerID="cri-o://59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd" gracePeriod=30 Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.414763 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="proxy-httpd" containerID="cri-o://f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2" gracePeriod=30 Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.414816 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-notification-agent" containerID="cri-o://083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c" gracePeriod=30 Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.414772 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-central-agent" containerID="cri-o://2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c" gracePeriod=30 Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.423037 4953 generic.go:334] "Generic (PLEG): container finished" podID="a800d96f-d6e6-4698-8da8-a27dab6454b7" containerID="72f93a1505b2b1b33f76372870851c9f47d5c1d8fd28e42def6851caab7d88be" exitCode=0 Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.423091 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-txcgm" event={"ID":"a800d96f-d6e6-4698-8da8-a27dab6454b7","Type":"ContainerDied","Data":"72f93a1505b2b1b33f76372870851c9f47d5c1d8fd28e42def6851caab7d88be"} Oct 11 03:03:36 crc kubenswrapper[4953]: I1011 03:03:36.443159 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.851302671 podStartE2EDuration="40.443135894s" podCreationTimestamp="2025-10-11 03:02:56 +0000 UTC" firstStartedPulling="2025-10-11 03:02:57.293955321 +0000 UTC m=+988.227042965" lastFinishedPulling="2025-10-11 03:03:35.885788544 +0000 UTC m=+1026.818876188" observedRunningTime="2025-10-11 03:03:36.435598892 +0000 UTC m=+1027.368686566" watchObservedRunningTime="2025-10-11 
03:03:36.443135894 +0000 UTC m=+1027.376223578" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431505 4953 generic.go:334] "Generic (PLEG): container finished" podID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerID="f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2" exitCode=0 Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431748 4953 generic.go:334] "Generic (PLEG): container finished" podID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerID="59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd" exitCode=2 Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431762 4953 generic.go:334] "Generic (PLEG): container finished" podID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerID="2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c" exitCode=0 Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431752 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerDied","Data":"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2"} Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431874 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerDied","Data":"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd"} Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.431908 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerDied","Data":"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c"} Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.618118 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.711141 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.711409 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="dnsmasq-dns" containerID="cri-o://67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53" gracePeriod=10 Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.817901 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.855400 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96h8d\" (UniqueName: \"kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d\") pod \"a800d96f-d6e6-4698-8da8-a27dab6454b7\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.855491 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle\") pod \"a800d96f-d6e6-4698-8da8-a27dab6454b7\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.855567 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data\") pod \"a800d96f-d6e6-4698-8da8-a27dab6454b7\" (UID: \"a800d96f-d6e6-4698-8da8-a27dab6454b7\") " Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.885459 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d" (OuterVolumeSpecName: "kube-api-access-96h8d") pod "a800d96f-d6e6-4698-8da8-a27dab6454b7" (UID: "a800d96f-d6e6-4698-8da8-a27dab6454b7"). InnerVolumeSpecName "kube-api-access-96h8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.885781 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a800d96f-d6e6-4698-8da8-a27dab6454b7" (UID: "a800d96f-d6e6-4698-8da8-a27dab6454b7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.897680 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a800d96f-d6e6-4698-8da8-a27dab6454b7" (UID: "a800d96f-d6e6-4698-8da8-a27dab6454b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.957509 4953 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.957563 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96h8d\" (UniqueName: \"kubernetes.io/projected/a800d96f-d6e6-4698-8da8-a27dab6454b7-kube-api-access-96h8d\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:37 crc kubenswrapper[4953]: I1011 03:03:37.957582 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a800d96f-d6e6-4698-8da8-a27dab6454b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.185731 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.366834 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb\") pod \"d0276760-d6d3-487c-bf61-561381a0f68b\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.366901 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config\") pod \"d0276760-d6d3-487c-bf61-561381a0f68b\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.367017 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzw5k\" (UniqueName: \"kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k\") pod \"d0276760-d6d3-487c-bf61-561381a0f68b\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.367081 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb\") pod \"d0276760-d6d3-487c-bf61-561381a0f68b\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.367166 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc\") pod \"d0276760-d6d3-487c-bf61-561381a0f68b\" (UID: \"d0276760-d6d3-487c-bf61-561381a0f68b\") " Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.371632 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k" (OuterVolumeSpecName: "kube-api-access-jzw5k") pod "d0276760-d6d3-487c-bf61-561381a0f68b" (UID: "d0276760-d6d3-487c-bf61-561381a0f68b"). InnerVolumeSpecName "kube-api-access-jzw5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.411089 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d0276760-d6d3-487c-bf61-561381a0f68b" (UID: "d0276760-d6d3-487c-bf61-561381a0f68b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.411113 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d0276760-d6d3-487c-bf61-561381a0f68b" (UID: "d0276760-d6d3-487c-bf61-561381a0f68b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.412188 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config" (OuterVolumeSpecName: "config") pod "d0276760-d6d3-487c-bf61-561381a0f68b" (UID: "d0276760-d6d3-487c-bf61-561381a0f68b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.416345 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d0276760-d6d3-487c-bf61-561381a0f68b" (UID: "d0276760-d6d3-487c-bf61-561381a0f68b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.441976 4953 generic.go:334] "Generic (PLEG): container finished" podID="d0276760-d6d3-487c-bf61-561381a0f68b" containerID="67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53" exitCode=0 Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.442140 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.442186 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" event={"ID":"d0276760-d6d3-487c-bf61-561381a0f68b","Type":"ContainerDied","Data":"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53"} Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.442249 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-745b9ddc8c-dzp8h" event={"ID":"d0276760-d6d3-487c-bf61-561381a0f68b","Type":"ContainerDied","Data":"52afd50afcdf268ced1ae4165ace597776f4e7542a07372332f6b7ccc12eac00"} Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.442304 4953 scope.go:117] "RemoveContainer" containerID="67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.446073 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-txcgm" event={"ID":"a800d96f-d6e6-4698-8da8-a27dab6454b7","Type":"ContainerDied","Data":"1d7591e2c33bc4768905d4af6fb49d6b5fb58a78e354c50e92217387330d0add"} Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.446233 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d7591e2c33bc4768905d4af6fb49d6b5fb58a78e354c50e92217387330d0add" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.446290 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-txcgm" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.472173 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.472202 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.472215 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzw5k\" (UniqueName: \"kubernetes.io/projected/d0276760-d6d3-487c-bf61-561381a0f68b-kube-api-access-jzw5k\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.472228 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.472240 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0276760-d6d3-487c-bf61-561381a0f68b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.475502 4953 scope.go:117] "RemoveContainer" containerID="53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.505700 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.511425 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-745b9ddc8c-dzp8h"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.517693 4953 scope.go:117] "RemoveContainer" containerID="67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53" Oct 11 03:03:38 crc kubenswrapper[4953]: E1011 03:03:38.518173 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53\": container with ID starting with 67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53 not found: ID does not exist" containerID="67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.518203 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53"} err="failed to get container status \"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53\": rpc error: code = NotFound desc = could not find container \"67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53\": container with ID starting with 67fbbf13159583ea6fdeee93791e895f41ee032cfa510471ba5f6f8ba1e61b53 not found: ID does not exist" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.518223 4953 scope.go:117] "RemoveContainer" containerID="53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690" Oct 11 03:03:38 crc kubenswrapper[4953]: E1011 03:03:38.520529 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690\": container with ID starting with 53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690 not found: ID does not exist" containerID="53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.520575 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690"} err="failed to get container status \"53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690\": rpc error: code = NotFound desc = could not find container \"53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690\": container with ID starting with 53ee881726441588274ead5b282b38e570e962d34e5beb27b4597e5e6d446690 not found: ID does not exist" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.813681 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-57d964cfc9-nsdbv"] Oct 11 03:03:38 crc kubenswrapper[4953]: E1011 03:03:38.814103 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a800d96f-d6e6-4698-8da8-a27dab6454b7" containerName="barbican-db-sync" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.814120 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a800d96f-d6e6-4698-8da8-a27dab6454b7" containerName="barbican-db-sync" Oct 11 03:03:38 crc kubenswrapper[4953]: E1011 03:03:38.814138 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="init" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.814145 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="init" Oct 11 03:03:38 crc kubenswrapper[4953]: E1011 03:03:38.814152 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="dnsmasq-dns" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.814159 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="dnsmasq-dns" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.814335 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a800d96f-d6e6-4698-8da8-a27dab6454b7" containerName="barbican-db-sync" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.814354 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" containerName="dnsmasq-dns" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.815280 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.818025 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.818379 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.822831 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-rsld9" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.834526 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-55994c8b46-9zhbn"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.836566 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.840788 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.843386 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-55994c8b46-9zhbn"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.855343 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-57d964cfc9-nsdbv"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882415 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data-custom\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882544 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882580 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321fd9ba-894b-4da7-a3eb-a9052645f13b-logs\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882623 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882669 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz9pl\" (UniqueName: \"kubernetes.io/projected/bbf6cb99-5239-4d59-8710-ecb946f343ac-kube-api-access-cz9pl\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " 
pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882724 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data-custom\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-combined-ca-bundle\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882829 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn9mw\" (UniqueName: \"kubernetes.io/projected/321fd9ba-894b-4da7-a3eb-a9052645f13b-kube-api-access-mn9mw\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882858 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbf6cb99-5239-4d59-8710-ecb946f343ac-logs\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.882895 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-combined-ca-bundle\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.909281 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.911483 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.926381 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.984912 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.984984 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321fd9ba-894b-4da7-a3eb-a9052645f13b-logs\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985013 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985053 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz9pl\" (UniqueName: \"kubernetes.io/projected/bbf6cb99-5239-4d59-8710-ecb946f343ac-kube-api-access-cz9pl\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985099 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data-custom\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985132 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-combined-ca-bundle\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985170 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn9mw\" (UniqueName: \"kubernetes.io/projected/321fd9ba-894b-4da7-a3eb-a9052645f13b-kube-api-access-mn9mw\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985195 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbf6cb99-5239-4d59-8710-ecb946f343ac-logs\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985229 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-combined-ca-bundle\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.985313 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data-custom\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.989525 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbf6cb99-5239-4d59-8710-ecb946f343ac-logs\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.989699 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/321fd9ba-894b-4da7-a3eb-a9052645f13b-logs\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.997275 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-combined-ca-bundle\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.997318 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-combined-ca-bundle\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.997324 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data-custom\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.997988 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data-custom\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:38 crc kubenswrapper[4953]: I1011 03:03:38.998229 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321fd9ba-894b-4da7-a3eb-a9052645f13b-config-data\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:39 crc 
kubenswrapper[4953]: I1011 03:03:39.013282 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbf6cb99-5239-4d59-8710-ecb946f343ac-config-data\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.017566 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn9mw\" (UniqueName: \"kubernetes.io/projected/321fd9ba-894b-4da7-a3eb-a9052645f13b-kube-api-access-mn9mw\") pod \"barbican-worker-57d964cfc9-nsdbv\" (UID: \"321fd9ba-894b-4da7-a3eb-a9052645f13b\") " pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.018876 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz9pl\" (UniqueName: \"kubernetes.io/projected/bbf6cb99-5239-4d59-8710-ecb946f343ac-kube-api-access-cz9pl\") pod \"barbican-keystone-listener-55994c8b46-9zhbn\" (UID: \"bbf6cb99-5239-4d59-8710-ecb946f343ac\") " pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.051860 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.053822 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.055473 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.065917 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.087925 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.087968 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088029 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd5jp\" (UniqueName: \"kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088049 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 
03:03:39.088106 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088141 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqnc6\" (UniqueName: \"kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088166 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088230 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088253 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.088289 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.152197 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-57d964cfc9-nsdbv" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.161505 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189246 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189291 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189333 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd5jp\" (UniqueName: \"kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189353 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189382 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189406 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqnc6\" (UniqueName: \"kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189426 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189451 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189466 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " 
pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.189496 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.190049 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.190700 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.190758 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.190816 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.191431 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.195335 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.195401 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.195707 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.210489 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sd5jp\" (UniqueName: \"kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp\") pod \"dnsmasq-dns-699df9757c-q6p2b\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.213877 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqnc6\" (UniqueName: \"kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6\") pod \"barbican-api-67c7667658-mv75m\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.244764 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.377273 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.655130 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-57d964cfc9-nsdbv"] Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.666494 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.699480 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-55994c8b46-9zhbn"] Oct 11 03:03:39 crc kubenswrapper[4953]: W1011 03:03:39.701487 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbf6cb99_5239_4d59_8710_ecb946f343ac.slice/crio-7d066932c65e8e9c46151dc6cdf9ab6465ca20bdd0202764a3e85582e80aa445 WatchSource:0}: Error finding container 7d066932c65e8e9c46151dc6cdf9ab6465ca20bdd0202764a3e85582e80aa445: Status 404 returned error can't find the container with id 7d066932c65e8e9c46151dc6cdf9ab6465ca20bdd0202764a3e85582e80aa445 Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.778559 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:39 crc kubenswrapper[4953]: W1011 03:03:39.783131 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2104fd51_0add_4da4_8df2_00b1befafe62.slice/crio-5dcb1d1a9fc645353a89cf287ca3e2efb15919bf7eac2fc1e9fb47dec6cc74da WatchSource:0}: Error finding container 5dcb1d1a9fc645353a89cf287ca3e2efb15919bf7eac2fc1e9fb47dec6cc74da: Status 404 returned error can't find the container with id 5dcb1d1a9fc645353a89cf287ca3e2efb15919bf7eac2fc1e9fb47dec6cc74da Oct 11 03:03:39 crc kubenswrapper[4953]: I1011 03:03:39.811036 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0276760-d6d3-487c-bf61-561381a0f68b" path="/var/lib/kubelet/pods/d0276760-d6d3-487c-bf61-561381a0f68b/volumes" Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.481721 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57d964cfc9-nsdbv" event={"ID":"321fd9ba-894b-4da7-a3eb-a9052645f13b","Type":"ContainerStarted","Data":"f3b9f640b7d1f03d9129141f42f1f0c466f368300568a3e0ad54cff794f8ef5a"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.485417 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerStarted","Data":"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.485467 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerStarted","Data":"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.485479 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerStarted","Data":"4f43b0c3fa8060e322d0d03b66a89ddb88e989c1ee8bb4f5f8199b4cd92be16f"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.485522 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.485535 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.490011 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" event={"ID":"bbf6cb99-5239-4d59-8710-ecb946f343ac","Type":"ContainerStarted","Data":"7d066932c65e8e9c46151dc6cdf9ab6465ca20bdd0202764a3e85582e80aa445"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.492344 4953 generic.go:334] "Generic (PLEG): container finished" podID="2104fd51-0add-4da4-8df2-00b1befafe62" containerID="343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d" exitCode=0 Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.492391 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" event={"ID":"2104fd51-0add-4da4-8df2-00b1befafe62","Type":"ContainerDied","Data":"343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.492417 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" event={"ID":"2104fd51-0add-4da4-8df2-00b1befafe62","Type":"ContainerStarted","Data":"5dcb1d1a9fc645353a89cf287ca3e2efb15919bf7eac2fc1e9fb47dec6cc74da"} Oct 11 03:03:40 crc kubenswrapper[4953]: I1011 03:03:40.525949 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-67c7667658-mv75m" podStartSLOduration=1.525906643 podStartE2EDuration="1.525906643s" podCreationTimestamp="2025-10-11 03:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:40.508653533 +0000 UTC m=+1031.441741207" watchObservedRunningTime="2025-10-11 03:03:40.525906643 +0000 UTC m=+1031.458994287" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.356997 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5ccd664cc4-t4w8m"] Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.358629 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.363387 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.363933 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.376842 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ccd664cc4-t4w8m"] Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444723 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrvsw\" (UniqueName: \"kubernetes.io/projected/b9eef66c-5177-4fe9-922f-099e01797490-kube-api-access-nrvsw\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444785 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data-custom\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444825 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-public-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444875 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-internal-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444935 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9eef66c-5177-4fe9-922f-099e01797490-logs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444962 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.444980 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-combined-ca-bundle\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.532756 4953 generic.go:334] "Generic (PLEG): 
container finished" podID="244dbbcd-798d-4069-8f37-83a0391a98d6" containerID="1cba574eb5163399c111c61c5e2ec974e04c14a6e9ed020aca795bfaa6ca676c" exitCode=0 Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.533205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n95ss" event={"ID":"244dbbcd-798d-4069-8f37-83a0391a98d6","Type":"ContainerDied","Data":"1cba574eb5163399c111c61c5e2ec974e04c14a6e9ed020aca795bfaa6ca676c"} Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.541715 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" event={"ID":"2104fd51-0add-4da4-8df2-00b1befafe62","Type":"ContainerStarted","Data":"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7"} Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.541763 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.545976 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrvsw\" (UniqueName: \"kubernetes.io/projected/b9eef66c-5177-4fe9-922f-099e01797490-kube-api-access-nrvsw\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546025 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data-custom\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546064 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-public-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546132 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-internal-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546196 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9eef66c-5177-4fe9-922f-099e01797490-logs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546224 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.546238 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-combined-ca-bundle\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.547411 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9eef66c-5177-4fe9-922f-099e01797490-logs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.570683 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-combined-ca-bundle\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.571743 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-internal-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.572025 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-public-tls-certs\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.582127 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrvsw\" (UniqueName: \"kubernetes.io/projected/b9eef66c-5177-4fe9-922f-099e01797490-kube-api-access-nrvsw\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.585241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data-custom\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.601387 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" podStartSLOduration=3.60136637 podStartE2EDuration="3.60136637s" podCreationTimestamp="2025-10-11 03:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:41.595931942 +0000 UTC m=+1032.529019586" watchObservedRunningTime="2025-10-11 03:03:41.60136637 +0000 UTC m=+1032.534454014" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.604086 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9eef66c-5177-4fe9-922f-099e01797490-config-data\") pod \"barbican-api-5ccd664cc4-t4w8m\" (UID: \"b9eef66c-5177-4fe9-922f-099e01797490\") " pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.687048 4953 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:41 crc kubenswrapper[4953]: I1011 03:03:41.911259 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.052796 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053489 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053619 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5q8f\" (UniqueName: \"kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053653 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053679 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053751 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.053794 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts\") pod \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\" (UID: \"739f1a09-0ddc-41e3-bfe8-f482621c83a4\") " Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.054698 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.055280 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.059674 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts" (OuterVolumeSpecName: "scripts") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.059710 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f" (OuterVolumeSpecName: "kube-api-access-v5q8f") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "kube-api-access-v5q8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.087253 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.141230 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.144960 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5ccd664cc4-t4w8m"] Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155862 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155888 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155899 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5q8f\" (UniqueName: \"kubernetes.io/projected/739f1a09-0ddc-41e3-bfe8-f482621c83a4-kube-api-access-v5q8f\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155909 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/739f1a09-0ddc-41e3-bfe8-f482621c83a4-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155918 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.155926 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-scripts\") on 
node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.164987 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data" (OuterVolumeSpecName: "config-data") pod "739f1a09-0ddc-41e3-bfe8-f482621c83a4" (UID: "739f1a09-0ddc-41e3-bfe8-f482621c83a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.257695 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/739f1a09-0ddc-41e3-bfe8-f482621c83a4-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.548412 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ccd664cc4-t4w8m" event={"ID":"b9eef66c-5177-4fe9-922f-099e01797490","Type":"ContainerStarted","Data":"e06877167c137e40339a5ee2999e286428505484fcf90987da39a3022b1e219d"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.548458 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ccd664cc4-t4w8m" event={"ID":"b9eef66c-5177-4fe9-922f-099e01797490","Type":"ContainerStarted","Data":"66c0c41b018dcd7bebd75e4543779a65aefd0dd676f8eb19e3f2afabfc2c0d37"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.551212 4953 generic.go:334] "Generic (PLEG): container finished" podID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerID="083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c" exitCode=0 Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.551269 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerDied","Data":"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.551294 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"739f1a09-0ddc-41e3-bfe8-f482621c83a4","Type":"ContainerDied","Data":"fa342d7c42dba58e2848e0f31a211085932e11b925cb15a9958fac7a4358ab34"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.551311 4953 scope.go:117] "RemoveContainer" containerID="f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.551425 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.567652 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57d964cfc9-nsdbv" event={"ID":"321fd9ba-894b-4da7-a3eb-a9052645f13b","Type":"ContainerStarted","Data":"167d2dd5c153d322867fb05d93a4a8a4eb8d831ed2d1e33c3db27f7c57ea1de4"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.567692 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57d964cfc9-nsdbv" event={"ID":"321fd9ba-894b-4da7-a3eb-a9052645f13b","Type":"ContainerStarted","Data":"ed67d48ee92ae6a74d41cee5b9bfc9e4a71b2afac559701b07cfbd9d90cb0ba5"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.569894 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" event={"ID":"bbf6cb99-5239-4d59-8710-ecb946f343ac","Type":"ContainerStarted","Data":"82a4c9053760ad12b91c96b65a893b85504d68de8d2e8c42b24d80620a9c42a1"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.569945 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" event={"ID":"bbf6cb99-5239-4d59-8710-ecb946f343ac","Type":"ContainerStarted","Data":"771c2cef5e7d318cb32a7129cc8948bba97a98e805e90d0033f2b44bbedfdbde"} Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.596093 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.619404 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.626815 4953 scope.go:117] "RemoveContainer" containerID="59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.633835 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-57d964cfc9-nsdbv" podStartSLOduration=2.951361875 podStartE2EDuration="4.6338092s" podCreationTimestamp="2025-10-11 03:03:38 +0000 UTC" firstStartedPulling="2025-10-11 03:03:39.640880227 +0000 UTC m=+1030.573967871" lastFinishedPulling="2025-10-11 03:03:41.323327552 +0000 UTC m=+1032.256415196" observedRunningTime="2025-10-11 03:03:42.598903289 +0000 UTC m=+1033.531990943" watchObservedRunningTime="2025-10-11 03:03:42.6338092 +0000 UTC m=+1033.566896844" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.653255 4953 scope.go:117] "RemoveContainer" containerID="083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.666767 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.667272 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-central-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667299 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-central-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.667334 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="sg-core" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667344 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" 
containerName="sg-core" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.667353 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-notification-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667364 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-notification-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.667373 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="proxy-httpd" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667382 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="proxy-httpd" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667597 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="proxy-httpd" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667647 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="sg-core" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667677 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-central-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.667694 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" containerName="ceilometer-notification-agent" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.669738 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.669845 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.677737 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-55994c8b46-9zhbn" podStartSLOduration=3.058560932 podStartE2EDuration="4.677720061s" podCreationTimestamp="2025-10-11 03:03:38 +0000 UTC" firstStartedPulling="2025-10-11 03:03:39.704323997 +0000 UTC m=+1030.637411641" lastFinishedPulling="2025-10-11 03:03:41.323483126 +0000 UTC m=+1032.256570770" observedRunningTime="2025-10-11 03:03:42.636368706 +0000 UTC m=+1033.569456360" watchObservedRunningTime="2025-10-11 03:03:42.677720061 +0000 UTC m=+1033.610807705" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.677984 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.678142 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.706253 4953 scope.go:117] "RemoveContainer" containerID="2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.725962 4953 scope.go:117] "RemoveContainer" containerID="f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.728175 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2\": container with ID starting with f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2 not found: ID does not exist" containerID="f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.728223 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2"} err="failed to get container status \"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2\": rpc error: code = NotFound desc = could not find container \"f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2\": container with ID starting with f790d9696f4a9975b9f3617fcea53d5d8f5614ccd4c91cf24cf87f7f452679d2 not found: ID does not exist" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.728253 4953 scope.go:117] "RemoveContainer" containerID="59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.728953 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd\": container with ID starting with 59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd not found: ID does not exist" containerID="59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.728982 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd"} err="failed to get container status \"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd\": rpc error: code = NotFound desc = could not find container \"59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd\": 
container with ID starting with 59a7e4d603a30663b9267cc5f69be01e4148621227439983176054fce1d5e5cd not found: ID does not exist" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.729000 4953 scope.go:117] "RemoveContainer" containerID="083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.729191 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c\": container with ID starting with 083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c not found: ID does not exist" containerID="083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.729213 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c"} err="failed to get container status \"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c\": rpc error: code = NotFound desc = could not find container \"083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c\": container with ID starting with 083bd3888badd7e1edcf4d20bcbbbcaf5b7150d252a0499be1554a4d3b3be41c not found: ID does not exist" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.729225 4953 scope.go:117] "RemoveContainer" containerID="2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c" Oct 11 03:03:42 crc kubenswrapper[4953]: E1011 03:03:42.729501 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c\": container with ID starting with 2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c not found: ID does not exist" containerID="2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.729522 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c"} err="failed to get container status \"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c\": rpc error: code = NotFound desc = could not find container \"2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c\": container with ID starting with 2f72a2977b352163128e5778e413991ef0bb5eff0a8c3759f7b4a2a3f9d17f6c not found: ID does not exist" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771167 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771322 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771412 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771490 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771525 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771654 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfw75\" (UniqueName: \"kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.771719 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873270 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873346 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873367 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873407 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfw75\" (UniqueName: \"kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873428 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873459 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.873510 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.874133 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.877072 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.879404 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.883495 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.883567 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.883952 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.896353 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfw75\" (UniqueName: \"kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75\") pod \"ceilometer-0\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") " pod="openstack/ceilometer-0" Oct 11 03:03:42 crc kubenswrapper[4953]: I1011 03:03:42.985832 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.085223 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.177414 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle\") pod \"244dbbcd-798d-4069-8f37-83a0391a98d6\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.177560 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdtf9\" (UniqueName: \"kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9\") pod \"244dbbcd-798d-4069-8f37-83a0391a98d6\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.177781 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config\") pod \"244dbbcd-798d-4069-8f37-83a0391a98d6\" (UID: \"244dbbcd-798d-4069-8f37-83a0391a98d6\") " Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.182758 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9" (OuterVolumeSpecName: "kube-api-access-xdtf9") pod "244dbbcd-798d-4069-8f37-83a0391a98d6" (UID: "244dbbcd-798d-4069-8f37-83a0391a98d6"). InnerVolumeSpecName "kube-api-access-xdtf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.200320 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config" (OuterVolumeSpecName: "config") pod "244dbbcd-798d-4069-8f37-83a0391a98d6" (UID: "244dbbcd-798d-4069-8f37-83a0391a98d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.201322 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "244dbbcd-798d-4069-8f37-83a0391a98d6" (UID: "244dbbcd-798d-4069-8f37-83a0391a98d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.283332 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.283387 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdtf9\" (UniqueName: \"kubernetes.io/projected/244dbbcd-798d-4069-8f37-83a0391a98d6-kube-api-access-xdtf9\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.283412 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/244dbbcd-798d-4069-8f37-83a0391a98d6-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.430852 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.584019 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n95ss" event={"ID":"244dbbcd-798d-4069-8f37-83a0391a98d6","Type":"ContainerDied","Data":"999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329"} Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.584519 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="999aa2286d7e0ea274024224679f1e8228bd5abf57dbff20973eba72e22e9329" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.584217 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-n95ss" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.587490 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerStarted","Data":"4b536996c70929bd437b1d6370bb47b08462f558385e2c3be1ff29fe04100ae8"} Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.589724 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5ccd664cc4-t4w8m" event={"ID":"b9eef66c-5177-4fe9-922f-099e01797490","Type":"ContainerStarted","Data":"2bfaf1ad22ccc70ec93f5261b5ebbcf680a3ac21abddbe8ab882b4bf563ed645"} Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.590094 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.616569 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5ccd664cc4-t4w8m" podStartSLOduration=2.6165452609999997 podStartE2EDuration="2.616545261s" podCreationTimestamp="2025-10-11 03:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:43.610063856 +0000 UTC m=+1034.543151510" watchObservedRunningTime="2025-10-11 03:03:43.616545261 +0000 UTC m=+1034.549632905" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.811064 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="739f1a09-0ddc-41e3-bfe8-f482621c83a4" path="/var/lib/kubelet/pods/739f1a09-0ddc-41e3-bfe8-f482621c83a4/volumes" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.821916 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 
03:03:43.822136 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="dnsmasq-dns" containerID="cri-o://7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7" gracePeriod=10 Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.834654 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:03:43 crc kubenswrapper[4953]: E1011 03:03:43.835024 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244dbbcd-798d-4069-8f37-83a0391a98d6" containerName="neutron-db-sync" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.835044 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="244dbbcd-798d-4069-8f37-83a0391a98d6" containerName="neutron-db-sync" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.835202 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="244dbbcd-798d-4069-8f37-83a0391a98d6" containerName="neutron-db-sync" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.836226 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.851171 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.966107 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"] Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.968277 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.976300 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qxljb" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.976328 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.976423 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.977314 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.990167 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"] Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.999125 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.999210 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbr9m\" (UniqueName: \"kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.999270 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.999313 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:43 crc kubenswrapper[4953]: I1011 03:03:43.999356 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101210 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101280 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhcqq\" (UniqueName: \"kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101332 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101446 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101486 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbr9m\" (UniqueName: \"kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101523 4953 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101554 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101583 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.101612 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.102650 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.102905 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.102907 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.102981 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.120214 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbr9m\" (UniqueName: \"kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m\") pod \"dnsmasq-dns-6bb684768f-s8rxg\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.203430 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config\") pod 
\"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.203595 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.203703 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.203730 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhcqq\" (UniqueName: \"kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.203763 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.208960 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.208974 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.210239 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.211517 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.218837 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.225346 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhcqq\" (UniqueName: \"kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq\") pod \"neutron-ff7cc76b4-qffvt\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") " pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.261570 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.368102 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.407189 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb\") pod \"2104fd51-0add-4da4-8df2-00b1befafe62\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.407232 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc\") pod \"2104fd51-0add-4da4-8df2-00b1befafe62\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.407331 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb\") pod \"2104fd51-0add-4da4-8df2-00b1befafe62\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.407384 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd5jp\" (UniqueName: \"kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp\") pod \"2104fd51-0add-4da4-8df2-00b1befafe62\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.407412 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config\") pod \"2104fd51-0add-4da4-8df2-00b1befafe62\" (UID: \"2104fd51-0add-4da4-8df2-00b1befafe62\") " Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.421819 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp" (OuterVolumeSpecName: "kube-api-access-sd5jp") pod "2104fd51-0add-4da4-8df2-00b1befafe62" (UID: "2104fd51-0add-4da4-8df2-00b1befafe62"). InnerVolumeSpecName "kube-api-access-sd5jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.475024 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2104fd51-0add-4da4-8df2-00b1befafe62" (UID: "2104fd51-0add-4da4-8df2-00b1befafe62"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.476102 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2104fd51-0add-4da4-8df2-00b1befafe62" (UID: "2104fd51-0add-4da4-8df2-00b1befafe62"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.479161 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config" (OuterVolumeSpecName: "config") pod "2104fd51-0add-4da4-8df2-00b1befafe62" (UID: "2104fd51-0add-4da4-8df2-00b1befafe62"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.511126 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd5jp\" (UniqueName: \"kubernetes.io/projected/2104fd51-0add-4da4-8df2-00b1befafe62-kube-api-access-sd5jp\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.511161 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.511170 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.511179 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.528518 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2104fd51-0add-4da4-8df2-00b1befafe62" (UID: "2104fd51-0add-4da4-8df2-00b1befafe62"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.604806 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerStarted","Data":"b84f8e4229580d44e28569ca011f8674452829abbed32fc9456bf1b06885a58a"} Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.606837 4953 generic.go:334] "Generic (PLEG): container finished" podID="2104fd51-0add-4da4-8df2-00b1befafe62" containerID="7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7" exitCode=0 Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.607031 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" event={"ID":"2104fd51-0add-4da4-8df2-00b1befafe62","Type":"ContainerDied","Data":"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7"} Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.607394 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" event={"ID":"2104fd51-0add-4da4-8df2-00b1befafe62","Type":"ContainerDied","Data":"5dcb1d1a9fc645353a89cf287ca3e2efb15919bf7eac2fc1e9fb47dec6cc74da"} Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.607415 4953 scope.go:117] "RemoveContainer" containerID="7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.607098 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699df9757c-q6p2b" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.608333 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.612556 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2104fd51-0add-4da4-8df2-00b1befafe62-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.646523 4953 scope.go:117] "RemoveContainer" containerID="343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.654692 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.661768 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699df9757c-q6p2b"] Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.727969 4953 scope.go:117] "RemoveContainer" containerID="7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7" Oct 11 03:03:44 crc kubenswrapper[4953]: E1011 03:03:44.729780 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7\": container with ID starting with 7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7 not found: ID does not exist" containerID="7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.729833 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7"} err="failed to get container status \"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7\": rpc error: code 
= NotFound desc = could not find container \"7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7\": container with ID starting with 7444b070c5f65a85be0a2f92c331e94c10e2007237efb51efaffd124d58880b7 not found: ID does not exist" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.729867 4953 scope.go:117] "RemoveContainer" containerID="343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.732635 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:03:44 crc kubenswrapper[4953]: E1011 03:03:44.735028 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d\": container with ID starting with 343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d not found: ID does not exist" containerID="343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d" Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.735059 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d"} err="failed to get container status \"343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d\": rpc error: code = NotFound desc = could not find container \"343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d\": container with ID starting with 343dd22474b15c5fbc9708fcf54ba4332b27849deaadbb8ae9483715f4a2697d not found: ID does not exist" Oct 11 03:03:44 crc kubenswrapper[4953]: W1011 03:03:44.739878 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fe23efa_4623_415c_93ea_1bb629010c99.slice/crio-74618b6e536ef742f69432576e9f767eff0c44e286a13f2a8b16a952e8b3b1ba WatchSource:0}: Error finding container 74618b6e536ef742f69432576e9f767eff0c44e286a13f2a8b16a952e8b3b1ba: Status 404 returned error can't find the container with id 74618b6e536ef742f69432576e9f767eff0c44e286a13f2a8b16a952e8b3b1ba Oct 11 03:03:44 crc kubenswrapper[4953]: I1011 03:03:44.960414 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"] Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.619785 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerStarted","Data":"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.620366 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.620384 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerStarted","Data":"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.620401 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerStarted","Data":"ae181d45c4cd11f8c04fa12d535f57634d7c2e3ea7dd7aded4da5ce99da52672"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.623062 4953 generic.go:334] "Generic (PLEG): container 
finished" podID="5fe23efa-4623-415c-93ea-1bb629010c99" containerID="521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da" exitCode=0 Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.623135 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" event={"ID":"5fe23efa-4623-415c-93ea-1bb629010c99","Type":"ContainerDied","Data":"521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.623181 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" event={"ID":"5fe23efa-4623-415c-93ea-1bb629010c99","Type":"ContainerStarted","Data":"74618b6e536ef742f69432576e9f767eff0c44e286a13f2a8b16a952e8b3b1ba"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.625334 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerStarted","Data":"045a90bde669a89de83783668f8e04a229cd1956647addb8619925dd9618c46b"} Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.684410 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-ff7cc76b4-qffvt" podStartSLOduration=2.684392126 podStartE2EDuration="2.684392126s" podCreationTimestamp="2025-10-11 03:03:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:45.664133158 +0000 UTC m=+1036.597220802" watchObservedRunningTime="2025-10-11 03:03:45.684392126 +0000 UTC m=+1036.617479770" Oct 11 03:03:45 crc kubenswrapper[4953]: I1011 03:03:45.837444 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" path="/var/lib/kubelet/pods/2104fd51-0add-4da4-8df2-00b1befafe62/volumes" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.214400 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5658f8676c-24292"] Oct 11 03:03:46 crc kubenswrapper[4953]: E1011 03:03:46.215226 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="init" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.215244 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="init" Oct 11 03:03:46 crc kubenswrapper[4953]: E1011 03:03:46.215256 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="dnsmasq-dns" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.215264 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="dnsmasq-dns" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.216789 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2104fd51-0add-4da4-8df2-00b1befafe62" containerName="dnsmasq-dns" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.219429 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.226520 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.226736 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.230196 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5658f8676c-24292"] Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377028 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-httpd-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377125 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-ovndb-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377154 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l7kd\" (UniqueName: \"kubernetes.io/projected/f70d84e8-e7dc-41bc-ad84-227d742b8eae-kube-api-access-2l7kd\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377228 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-internal-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377405 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377471 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-combined-ca-bundle\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.377549 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-public-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480052 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l7kd\" (UniqueName: 
\"kubernetes.io/projected/f70d84e8-e7dc-41bc-ad84-227d742b8eae-kube-api-access-2l7kd\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480148 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-internal-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480203 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480236 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-combined-ca-bundle\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480275 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-public-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480320 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-httpd-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.480363 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-ovndb-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.487016 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-public-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.487315 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-ovndb-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.487465 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " 
pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.489295 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-combined-ca-bundle\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.493296 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-httpd-config\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.493837 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f70d84e8-e7dc-41bc-ad84-227d742b8eae-internal-tls-certs\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.500987 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l7kd\" (UniqueName: \"kubernetes.io/projected/f70d84e8-e7dc-41bc-ad84-227d742b8eae-kube-api-access-2l7kd\") pod \"neutron-5658f8676c-24292\" (UID: \"f70d84e8-e7dc-41bc-ad84-227d742b8eae\") " pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.593959 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.638889 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" event={"ID":"5fe23efa-4623-415c-93ea-1bb629010c99","Type":"ContainerStarted","Data":"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2"} Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.641691 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerStarted","Data":"9d943dd49525851bfe782fdcff919982a644d7bfc6662caba1d3f265de29bfdb"} Oct 11 03:03:46 crc kubenswrapper[4953]: I1011 03:03:46.684433 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.372378 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5658f8676c-24292"] Oct 11 03:03:47 crc kubenswrapper[4953]: W1011 03:03:47.376796 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf70d84e8_e7dc_41bc_ad84_227d742b8eae.slice/crio-22063ae9e53e716483950267e03aa95fed597ae94643cf97ea9c827b09fde9fc WatchSource:0}: Error finding container 22063ae9e53e716483950267e03aa95fed597ae94643cf97ea9c827b09fde9fc: Status 404 returned error can't find the container with id 22063ae9e53e716483950267e03aa95fed597ae94643cf97ea9c827b09fde9fc Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.667850 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5658f8676c-24292" event={"ID":"f70d84e8-e7dc-41bc-ad84-227d742b8eae","Type":"ContainerStarted","Data":"0df952e583c6795f00c9fc64b40deef2b43965bec884a45464d717fdd3b6de1b"} Oct 11 03:03:47 
crc kubenswrapper[4953]: I1011 03:03:47.668213 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5658f8676c-24292" event={"ID":"f70d84e8-e7dc-41bc-ad84-227d742b8eae","Type":"ContainerStarted","Data":"22063ae9e53e716483950267e03aa95fed597ae94643cf97ea9c827b09fde9fc"} Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.675709 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5xv4s" event={"ID":"37b17e82-1098-40f6-9a8b-ff2e863e5559","Type":"ContainerStarted","Data":"503ac283086d6618a753d7690294f54d4e579cc28d6ad20f3c4ce4d2f72c0dd3"} Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.676564 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.703846 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" podStartSLOduration=4.703824265 podStartE2EDuration="4.703824265s" podCreationTimestamp="2025-10-11 03:03:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:47.697724699 +0000 UTC m=+1038.630812363" watchObservedRunningTime="2025-10-11 03:03:47.703824265 +0000 UTC m=+1038.636911909" Oct 11 03:03:47 crc kubenswrapper[4953]: I1011 03:03:47.754232 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5xv4s" podStartSLOduration=8.901993861 podStartE2EDuration="46.754199761s" podCreationTimestamp="2025-10-11 03:03:01 +0000 UTC" firstStartedPulling="2025-10-11 03:03:07.814080994 +0000 UTC m=+998.747168638" lastFinishedPulling="2025-10-11 03:03:45.666286894 +0000 UTC m=+1036.599374538" observedRunningTime="2025-10-11 03:03:47.719001643 +0000 UTC m=+1038.652089287" watchObservedRunningTime="2025-10-11 03:03:47.754199761 +0000 UTC m=+1038.687287395" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.572342 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6754fbff86-gtfd6" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.690151 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5658f8676c-24292" event={"ID":"f70d84e8-e7dc-41bc-ad84-227d742b8eae","Type":"ContainerStarted","Data":"ec79aa7b47fbda83eeee24d4a8a533cac14174aeab1b8704c535ba0e072e4e03"} Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.691644 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5658f8676c-24292" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.698995 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.704350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerStarted","Data":"d0fdf6cd7cc8c77289c69c39349eabfed9e58c43bc36128bb46b8f85e028692d"} Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.705591 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.730376 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5658f8676c-24292" podStartSLOduration=2.73035046 podStartE2EDuration="2.73035046s" podCreationTimestamp="2025-10-11 03:03:46 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:03:48.715978594 +0000 UTC m=+1039.649066238" watchObservedRunningTime="2025-10-11 03:03:48.73035046 +0000 UTC m=+1039.663438094" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.872162 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.899711 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.205781582 podStartE2EDuration="6.899686304s" podCreationTimestamp="2025-10-11 03:03:42 +0000 UTC" firstStartedPulling="2025-10-11 03:03:43.436551806 +0000 UTC m=+1034.369639500" lastFinishedPulling="2025-10-11 03:03:48.130456578 +0000 UTC m=+1039.063544222" observedRunningTime="2025-10-11 03:03:48.774835224 +0000 UTC m=+1039.707922878" watchObservedRunningTime="2025-10-11 03:03:48.899686304 +0000 UTC m=+1039.832773948" Oct 11 03:03:48 crc kubenswrapper[4953]: I1011 03:03:48.941290 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-766db79b7b-s7l54" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.514430 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.515429 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.517361 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-c7rdr" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.518690 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.518699 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.566162 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.661132 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.661304 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config-secret\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.661365 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-combined-ca-bundle\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.661402 4953 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mffl\" (UniqueName: \"kubernetes.io/projected/18487b9f-9425-4d2e-ab10-e1d1974783db-kube-api-access-6mffl\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.764620 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config-secret\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.764764 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-combined-ca-bundle\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.764829 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mffl\" (UniqueName: \"kubernetes.io/projected/18487b9f-9425-4d2e-ab10-e1d1974783db-kube-api-access-6mffl\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.764892 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.770501 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.771856 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-combined-ca-bundle\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.776117 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/18487b9f-9425-4d2e-ab10-e1d1974783db-openstack-config-secret\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.799938 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mffl\" (UniqueName: \"kubernetes.io/projected/18487b9f-9425-4d2e-ab10-e1d1974783db-kube-api-access-6mffl\") pod \"openstackclient\" (UID: \"18487b9f-9425-4d2e-ab10-e1d1974783db\") " pod="openstack/openstackclient" Oct 11 03:03:49 crc kubenswrapper[4953]: I1011 03:03:49.874465 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 11 03:03:50 crc kubenswrapper[4953]: I1011 03:03:50.547971 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 11 03:03:50 crc kubenswrapper[4953]: I1011 03:03:50.725284 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"18487b9f-9425-4d2e-ab10-e1d1974783db","Type":"ContainerStarted","Data":"dbb744e3f944d19574d580aebae016e8605941e09502d4c61c31a7550f86992e"} Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.313367 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.549789 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5ccd664cc4-t4w8m" Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.628199 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.628580 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-67c7667658-mv75m" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api-log" containerID="cri-o://a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915" gracePeriod=30 Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.628745 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-67c7667658-mv75m" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api" containerID="cri-o://ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b" gracePeriod=30 Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.769701 4953 generic.go:334] "Generic (PLEG): container finished" podID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerID="a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915" exitCode=143 Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.769792 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerDied","Data":"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915"} Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.772869 4953 generic.go:334] "Generic (PLEG): container finished" podID="37b17e82-1098-40f6-9a8b-ff2e863e5559" containerID="503ac283086d6618a753d7690294f54d4e579cc28d6ad20f3c4ce4d2f72c0dd3" exitCode=0 Oct 11 03:03:53 crc kubenswrapper[4953]: I1011 03:03:53.773654 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5xv4s" event={"ID":"37b17e82-1098-40f6-9a8b-ff2e863e5559","Type":"ContainerDied","Data":"503ac283086d6618a753d7690294f54d4e579cc28d6ad20f3c4ce4d2f72c0dd3"} Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.221893 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.313951 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.316694 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="dnsmasq-dns" 
containerID="cri-o://3e668ce6ea362a40526cc9203aaac5b526d3ae2e0859f26e44648fcadc79cf5c" gracePeriod=10 Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.793256 4953 generic.go:334] "Generic (PLEG): container finished" podID="ed7285fa-8a49-4586-997c-e546a78ac436" containerID="3e668ce6ea362a40526cc9203aaac5b526d3ae2e0859f26e44648fcadc79cf5c" exitCode=0 Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.793966 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" event={"ID":"ed7285fa-8a49-4586-997c-e546a78ac436","Type":"ContainerDied","Data":"3e668ce6ea362a40526cc9203aaac5b526d3ae2e0859f26e44648fcadc79cf5c"} Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.794459 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" event={"ID":"ed7285fa-8a49-4586-997c-e546a78ac436","Type":"ContainerDied","Data":"a319bfaebcb7822e02d25ab0aa08027332caec8e8df3491692e5ed3073cf2a94"} Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.794503 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a319bfaebcb7822e02d25ab0aa08027332caec8e8df3491692e5ed3073cf2a94" Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.861728 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.929121 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config\") pod \"ed7285fa-8a49-4586-997c-e546a78ac436\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.929243 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc\") pod \"ed7285fa-8a49-4586-997c-e546a78ac436\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.929349 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb\") pod \"ed7285fa-8a49-4586-997c-e546a78ac436\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.929500 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x92nw\" (UniqueName: \"kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw\") pod \"ed7285fa-8a49-4586-997c-e546a78ac436\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.929551 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb\") pod \"ed7285fa-8a49-4586-997c-e546a78ac436\" (UID: \"ed7285fa-8a49-4586-997c-e546a78ac436\") " Oct 11 03:03:54 crc kubenswrapper[4953]: I1011 03:03:54.942359 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw" (OuterVolumeSpecName: "kube-api-access-x92nw") pod "ed7285fa-8a49-4586-997c-e546a78ac436" (UID: "ed7285fa-8a49-4586-997c-e546a78ac436"). InnerVolumeSpecName "kube-api-access-x92nw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.033444 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed7285fa-8a49-4586-997c-e546a78ac436" (UID: "ed7285fa-8a49-4586-997c-e546a78ac436"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.033719 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x92nw\" (UniqueName: \"kubernetes.io/projected/ed7285fa-8a49-4586-997c-e546a78ac436-kube-api-access-x92nw\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.033742 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.038347 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config" (OuterVolumeSpecName: "config") pod "ed7285fa-8a49-4586-997c-e546a78ac436" (UID: "ed7285fa-8a49-4586-997c-e546a78ac436"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.042190 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed7285fa-8a49-4586-997c-e546a78ac436" (UID: "ed7285fa-8a49-4586-997c-e546a78ac436"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.047137 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ed7285fa-8a49-4586-997c-e546a78ac436" (UID: "ed7285fa-8a49-4586-997c-e546a78ac436"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.111441 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.134869 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.134900 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.134910 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed7285fa-8a49-4586-997c-e546a78ac436-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.236949 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237053 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m64zw\" (UniqueName: \"kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237093 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237143 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237221 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237425 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data\") pod \"37b17e82-1098-40f6-9a8b-ff2e863e5559\" (UID: \"37b17e82-1098-40f6-9a8b-ff2e863e5559\") " Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237527 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.237909 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/37b17e82-1098-40f6-9a8b-ff2e863e5559-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.242631 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.242667 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts" (OuterVolumeSpecName: "scripts") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.244977 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw" (OuterVolumeSpecName: "kube-api-access-m64zw") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "kube-api-access-m64zw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.269085 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.287733 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data" (OuterVolumeSpecName: "config-data") pod "37b17e82-1098-40f6-9a8b-ff2e863e5559" (UID: "37b17e82-1098-40f6-9a8b-ff2e863e5559"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.339318 4953 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.339356 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m64zw\" (UniqueName: \"kubernetes.io/projected/37b17e82-1098-40f6-9a8b-ff2e863e5559-kube-api-access-m64zw\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.339372 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.339386 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.339400 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b17e82-1098-40f6-9a8b-ff2e863e5559-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.829285 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7987f74bbc-dcplw" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.830324 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5xv4s" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.830937 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5xv4s" event={"ID":"37b17e82-1098-40f6-9a8b-ff2e863e5559","Type":"ContainerDied","Data":"581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66"} Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.831800 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="581788e21a214c3e2b0982ce3d8bbb11b9224642ee027bebad4d68ec38fa3c66" Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.888842 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:55 crc kubenswrapper[4953]: I1011 03:03:55.896708 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7987f74bbc-dcplw"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.089050 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:03:56 crc kubenswrapper[4953]: E1011 03:03:56.089366 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="dnsmasq-dns" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.089382 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="dnsmasq-dns" Oct 11 03:03:56 crc kubenswrapper[4953]: E1011 03:03:56.089397 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" containerName="cinder-db-sync" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.089403 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" 
containerName="cinder-db-sync" Oct 11 03:03:56 crc kubenswrapper[4953]: E1011 03:03:56.089436 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="init" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.089442 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="init" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.089590 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" containerName="dnsmasq-dns" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.091149 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" containerName="cinder-db-sync" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.092012 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.132807 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.155803 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.157920 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.161140 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.161215 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.161251 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.161329 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.161362 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw9rx\" (UniqueName: \"kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 
03:03:56.162068 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-65stp" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.162311 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.162394 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.172141 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.207814 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265618 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265676 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv2qc\" (UniqueName: \"kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265698 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw9rx\" (UniqueName: \"kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265746 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265779 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265797 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265816 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 
03:03:56.265840 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265857 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265881 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.265919 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.266883 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.267460 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.268390 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.269050 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.269091 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.278416 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.283411 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.291528 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw9rx\" (UniqueName: \"kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx\") pod \"dnsmasq-dns-6d97fcdd8f-7x4tv\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.299520 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.371645 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv2qc\" (UniqueName: \"kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.373322 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.373586 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.373800 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.374360 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.374571 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.374769 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375004 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375118 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375230 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375378 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375502 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckrvh\" (UniqueName: \"kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375634 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.375746 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.379681 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.380222 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.380582 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.381095 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.392119 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv2qc\" (UniqueName: \"kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc\") pod \"cinder-scheduler-0\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.424458 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.485307 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.485708 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.485867 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.485888 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckrvh\" (UniqueName: \"kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.486261 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.486347 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.486875 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.486985 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom\") pod 
\"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.487031 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.490337 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.492257 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.493107 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.496342 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.505824 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckrvh\" (UniqueName: \"kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh\") pod \"cinder-api-0\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.514412 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.662807 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.827033 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-67c7667658-mv75m" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.146:9311/healthcheck\": read tcp 10.217.0.2:51090->10.217.0.146:9311: read: connection reset by peer" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.827799 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-67c7667658-mv75m" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.146:9311/healthcheck\": read tcp 10.217.0.2:51092->10.217.0.146:9311: read: connection reset by peer" Oct 11 03:03:56 crc kubenswrapper[4953]: I1011 03:03:56.939220 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:03:56 crc kubenswrapper[4953]: W1011 03:03:56.967291 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11a120e2_cd03_4c63_8a70_a2bc67f5f511.slice/crio-60d531642075e252b1fb8f2e7ba02f8f6431ecacfe6feacc2463dc314cf02f11 WatchSource:0}: Error finding container 60d531642075e252b1fb8f2e7ba02f8f6431ecacfe6feacc2463dc314cf02f11: Status 404 returned error can't find the container with id 60d531642075e252b1fb8f2e7ba02f8f6431ecacfe6feacc2463dc314cf02f11 Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.150019 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:03:57 crc kubenswrapper[4953]: W1011 03:03:57.180807 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod947f2af4_1ca5_4da4_8a24_0644845addd2.slice/crio-b30322ad5a48dc33b84a75016d769842ae6ebc6ef4bd884ccbf3afad9e5ae3e2 WatchSource:0}: Error finding container b30322ad5a48dc33b84a75016d769842ae6ebc6ef4bd884ccbf3afad9e5ae3e2: Status 404 returned error can't find the container with id b30322ad5a48dc33b84a75016d769842ae6ebc6ef4bd884ccbf3afad9e5ae3e2 Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.326652 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.404098 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqnc6\" (UniqueName: \"kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6\") pod \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.404260 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data\") pod \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.404308 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle\") pod \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.404343 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs\") pod \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.404425 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom\") pod \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\" (UID: \"e2ad8ed9-a225-4edf-90fe-620e277bcb63\") " Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.408104 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs" (OuterVolumeSpecName: "logs") pod "e2ad8ed9-a225-4edf-90fe-620e277bcb63" (UID: "e2ad8ed9-a225-4edf-90fe-620e277bcb63"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.415722 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6" (OuterVolumeSpecName: "kube-api-access-jqnc6") pod "e2ad8ed9-a225-4edf-90fe-620e277bcb63" (UID: "e2ad8ed9-a225-4edf-90fe-620e277bcb63"). InnerVolumeSpecName "kube-api-access-jqnc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.415845 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e2ad8ed9-a225-4edf-90fe-620e277bcb63" (UID: "e2ad8ed9-a225-4edf-90fe-620e277bcb63"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.434515 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2ad8ed9-a225-4edf-90fe-620e277bcb63" (UID: "e2ad8ed9-a225-4edf-90fe-620e277bcb63"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.446823 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.469507 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data" (OuterVolumeSpecName: "config-data") pod "e2ad8ed9-a225-4edf-90fe-620e277bcb63" (UID: "e2ad8ed9-a225-4edf-90fe-620e277bcb63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:03:57 crc kubenswrapper[4953]: W1011 03:03:57.483058 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb09975c_0012_4449_bc90_1838c68c97f4.slice/crio-b29dd5031a1132f5fc11acc92e32cc0a5a6c5a89790dae159a153c9d4063e8d3 WatchSource:0}: Error finding container b29dd5031a1132f5fc11acc92e32cc0a5a6c5a89790dae159a153c9d4063e8d3: Status 404 returned error can't find the container with id b29dd5031a1132f5fc11acc92e32cc0a5a6c5a89790dae159a153c9d4063e8d3 Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.515084 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.515125 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2ad8ed9-a225-4edf-90fe-620e277bcb63-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.515135 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.515144 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqnc6\" (UniqueName: \"kubernetes.io/projected/e2ad8ed9-a225-4edf-90fe-620e277bcb63-kube-api-access-jqnc6\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.515154 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2ad8ed9-a225-4edf-90fe-620e277bcb63-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.824067 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed7285fa-8a49-4586-997c-e546a78ac436" path="/var/lib/kubelet/pods/ed7285fa-8a49-4586-997c-e546a78ac436/volumes" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.900129 4953 generic.go:334] "Generic (PLEG): container finished" podID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerID="f024f209ecc3c5055d88b3ce6c7de3b6d8ef5997b76ae61962f64988a02c6ed2" exitCode=0 Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.900652 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" event={"ID":"11a120e2-cd03-4c63-8a70-a2bc67f5f511","Type":"ContainerDied","Data":"f024f209ecc3c5055d88b3ce6c7de3b6d8ef5997b76ae61962f64988a02c6ed2"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.900692 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" 
event={"ID":"11a120e2-cd03-4c63-8a70-a2bc67f5f511","Type":"ContainerStarted","Data":"60d531642075e252b1fb8f2e7ba02f8f6431ecacfe6feacc2463dc314cf02f11"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.912328 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerStarted","Data":"b30322ad5a48dc33b84a75016d769842ae6ebc6ef4bd884ccbf3afad9e5ae3e2"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.917914 4953 generic.go:334] "Generic (PLEG): container finished" podID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerID="ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b" exitCode=0 Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.917989 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67c7667658-mv75m" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.918027 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerDied","Data":"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.918064 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67c7667658-mv75m" event={"ID":"e2ad8ed9-a225-4edf-90fe-620e277bcb63","Type":"ContainerDied","Data":"4f43b0c3fa8060e322d0d03b66a89ddb88e989c1ee8bb4f5f8199b4cd92be16f"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.918088 4953 scope.go:117] "RemoveContainer" containerID="ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b" Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.925017 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerStarted","Data":"b29dd5031a1132f5fc11acc92e32cc0a5a6c5a89790dae159a153c9d4063e8d3"} Oct 11 03:03:57 crc kubenswrapper[4953]: I1011 03:03:57.989055 4953 scope.go:117] "RemoveContainer" containerID="a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915" Oct 11 03:03:58 crc kubenswrapper[4953]: I1011 03:03:58.018940 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:58 crc kubenswrapper[4953]: I1011 03:03:58.036111 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-67c7667658-mv75m"] Oct 11 03:03:58 crc kubenswrapper[4953]: I1011 03:03:58.405885 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:03:58 crc kubenswrapper[4953]: I1011 03:03:58.934722 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerStarted","Data":"e79aacec507f50fe57dddaa95e7df358b9a89f5af5321d51bee2b9653bd09781"} Oct 11 03:03:59 crc kubenswrapper[4953]: I1011 03:03:59.805792 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" path="/var/lib/kubelet/pods/e2ad8ed9-a225-4edf-90fe-620e277bcb63/volumes" Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.697953 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-qxgn4"] Oct 11 03:04:01 crc kubenswrapper[4953]: E1011 03:04:01.698626 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" 
containerName="barbican-api-log"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.698639 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api-log"
Oct 11 03:04:01 crc kubenswrapper[4953]: E1011 03:04:01.698660 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.698666 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.698814 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api-log"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.698824 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2ad8ed9-a225-4edf-90fe-620e277bcb63" containerName="barbican-api"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.699418 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.715555 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qxgn4"]
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.801009 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzk4m\" (UniqueName: \"kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m\") pod \"nova-api-db-create-qxgn4\" (UID: \"c3b2030c-9a69-4ea0-9253-9bf9a6aee523\") " pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.807789 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-nffxt"]
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.808789 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.814667 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nffxt"]
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.903046 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr9l5\" (UniqueName: \"kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5\") pod \"nova-cell0-db-create-nffxt\" (UID: \"74c049c7-ed9a-4591-a366-8a18852a3d91\") " pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.903182 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzk4m\" (UniqueName: \"kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m\") pod \"nova-api-db-create-qxgn4\" (UID: \"c3b2030c-9a69-4ea0-9253-9bf9a6aee523\") " pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.905082 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-dcjsx"]
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.907665 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.913253 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-dcjsx"]
Oct 11 03:04:01 crc kubenswrapper[4953]: I1011 03:04:01.932443 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzk4m\" (UniqueName: \"kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m\") pod \"nova-api-db-create-qxgn4\" (UID: \"c3b2030c-9a69-4ea0-9253-9bf9a6aee523\") " pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.004663 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw9sz\" (UniqueName: \"kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz\") pod \"nova-cell1-db-create-dcjsx\" (UID: \"e88244cd-5c67-4f33-b40f-58682d29da8b\") " pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.005129 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr9l5\" (UniqueName: \"kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5\") pod \"nova-cell0-db-create-nffxt\" (UID: \"74c049c7-ed9a-4591-a366-8a18852a3d91\") " pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.021927 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.024551 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr9l5\" (UniqueName: \"kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5\") pod \"nova-cell0-db-create-nffxt\" (UID: \"74c049c7-ed9a-4591-a366-8a18852a3d91\") " pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.107279 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw9sz\" (UniqueName: \"kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz\") pod \"nova-cell1-db-create-dcjsx\" (UID: \"e88244cd-5c67-4f33-b40f-58682d29da8b\") " pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.131759 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.133762 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw9sz\" (UniqueName: \"kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz\") pod \"nova-cell1-db-create-dcjsx\" (UID: \"e88244cd-5c67-4f33-b40f-58682d29da8b\") " pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.225566 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.479098 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.479372 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-central-agent" containerID="cri-o://b84f8e4229580d44e28569ca011f8674452829abbed32fc9456bf1b06885a58a" gracePeriod=30
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.479495 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="sg-core" containerID="cri-o://9d943dd49525851bfe782fdcff919982a644d7bfc6662caba1d3f265de29bfdb" gracePeriod=30
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.479492 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="proxy-httpd" containerID="cri-o://d0fdf6cd7cc8c77289c69c39349eabfed9e58c43bc36128bb46b8f85e028692d" gracePeriod=30
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.479549 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-notification-agent" containerID="cri-o://045a90bde669a89de83783668f8e04a229cd1956647addb8619925dd9618c46b" gracePeriod=30
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.490863 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.148:3000/\": EOF"
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971021 4953 generic.go:334] "Generic (PLEG): container finished" podID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerID="d0fdf6cd7cc8c77289c69c39349eabfed9e58c43bc36128bb46b8f85e028692d" exitCode=0
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971367 4953 generic.go:334] "Generic (PLEG): container finished" podID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerID="9d943dd49525851bfe782fdcff919982a644d7bfc6662caba1d3f265de29bfdb" exitCode=2
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971376 4953 generic.go:334] "Generic (PLEG): container finished" podID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerID="045a90bde669a89de83783668f8e04a229cd1956647addb8619925dd9618c46b" exitCode=0
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971386 4953 generic.go:334] "Generic (PLEG): container finished" podID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerID="b84f8e4229580d44e28569ca011f8674452829abbed32fc9456bf1b06885a58a" exitCode=0
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971126 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerDied","Data":"d0fdf6cd7cc8c77289c69c39349eabfed9e58c43bc36128bb46b8f85e028692d"}
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971425 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerDied","Data":"9d943dd49525851bfe782fdcff919982a644d7bfc6662caba1d3f265de29bfdb"}
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971440 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerDied","Data":"045a90bde669a89de83783668f8e04a229cd1956647addb8619925dd9618c46b"}
Oct 11 03:04:02 crc kubenswrapper[4953]: I1011 03:04:02.971452 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerDied","Data":"b84f8e4229580d44e28569ca011f8674452829abbed32fc9456bf1b06885a58a"}
Oct 11 03:04:03 crc kubenswrapper[4953]: I1011 03:04:03.707160 4953 scope.go:117] "RemoveContainer" containerID="ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b"
Oct 11 03:04:03 crc kubenswrapper[4953]: E1011 03:04:03.711379 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b\": container with ID starting with ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b not found: ID does not exist" containerID="ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b"
Oct 11 03:04:03 crc kubenswrapper[4953]: I1011 03:04:03.711434 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b"} err="failed to get container status \"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b\": rpc error: code = NotFound desc = could not find container \"ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b\": container with ID starting with ec0ab7e480ee23ed5ac9f8df70f47d8cc79e5b9a52dcc055884cda376b63890b not found: ID does not exist"
Oct 11 03:04:03 crc kubenswrapper[4953]: I1011 03:04:03.711467 4953 scope.go:117] "RemoveContainer" containerID="a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915"
Oct 11 03:04:03 crc kubenswrapper[4953]: E1011 03:04:03.711883 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915\": container with ID starting with a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915 not found: ID does not exist" containerID="a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915"
Oct 11 03:04:03 crc kubenswrapper[4953]: I1011 03:04:03.711913 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915"} err="failed to get container status \"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915\": rpc error: code = NotFound desc = could not find container \"a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915\": container with ID starting with a99699e77ff9c5b8b877b79b623de89dd67ec40e446afd0963e6f27fce443915 not found: ID does not exist"
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.014699 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" event={"ID":"11a120e2-cd03-4c63-8a70-a2bc67f5f511","Type":"ContainerStarted","Data":"308d5b9022e2f106baf39159ddf4358883b0562355f2b8d698a2e429e6273987"}
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.015456 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.039462 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" podStartSLOduration=8.039446915 podStartE2EDuration="8.039446915s" podCreationTimestamp="2025-10-11 03:03:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:04:04.037740812 +0000 UTC m=+1054.970828456" watchObservedRunningTime="2025-10-11 03:04:04.039446915 +0000 UTC m=+1054.972534559"
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.049175 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167052 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167439 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167509 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfw75\" (UniqueName: \"kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167549 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167581 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.167668 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.168424 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.170043 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle\") pod \"44e273a8-65b4-4990-ba54-bc862bcd805f\" (UID: \"44e273a8-65b4-4990-ba54-bc862bcd805f\") "
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.171303 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.173819 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.200066 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75" (OuterVolumeSpecName: "kube-api-access-rfw75") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "kube-api-access-rfw75". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.200174 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts" (OuterVolumeSpecName: "scripts") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.254366 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.273532 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfw75\" (UniqueName: \"kubernetes.io/projected/44e273a8-65b4-4990-ba54-bc862bcd805f-kube-api-access-rfw75\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.273563 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.273572 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44e273a8-65b4-4990-ba54-bc862bcd805f-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.273582 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.339819 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.339862 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-qxgn4"]
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.349779 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data" (OuterVolumeSpecName: "config-data") pod "44e273a8-65b4-4990-ba54-bc862bcd805f" (UID: "44e273a8-65b4-4990-ba54-bc862bcd805f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:04 crc kubenswrapper[4953]: W1011 03:04:04.353416 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3b2030c_9a69_4ea0_9253_9bf9a6aee523.slice/crio-8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a WatchSource:0}: Error finding container 8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a: Status 404 returned error can't find the container with id 8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.375347 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.375384 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e273a8-65b4-4990-ba54-bc862bcd805f-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.472380 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nffxt"]
Oct 11 03:04:04 crc kubenswrapper[4953]: I1011 03:04:04.479042 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-dcjsx"]
Oct 11 03:04:04 crc kubenswrapper[4953]: W1011 03:04:04.480966 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode88244cd_5c67_4f33_b40f_58682d29da8b.slice/crio-57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2 WatchSource:0}: Error finding container 57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2: Status 404 returned error can't find the container with id 57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2
Oct 11 03:04:04 crc kubenswrapper[4953]: W1011 03:04:04.487388 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74c049c7_ed9a_4591_a366_8a18852a3d91.slice/crio-182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76 WatchSource:0}: Error finding container 182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76: Status 404 returned error can't find the container with id 182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.036925 4953 generic.go:334] "Generic (PLEG): container finished" podID="e88244cd-5c67-4f33-b40f-58682d29da8b" containerID="0920be9a5b17748cc144d82da50835e7d388ec1bb67bfd03e11b0b1b706b777d" exitCode=0
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.038357 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dcjsx" event={"ID":"e88244cd-5c67-4f33-b40f-58682d29da8b","Type":"ContainerDied","Data":"0920be9a5b17748cc144d82da50835e7d388ec1bb67bfd03e11b0b1b706b777d"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.038399 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dcjsx" event={"ID":"e88244cd-5c67-4f33-b40f-58682d29da8b","Type":"ContainerStarted","Data":"57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.051968 4953 generic.go:334] "Generic (PLEG): container finished" podID="c3b2030c-9a69-4ea0-9253-9bf9a6aee523" containerID="f6c3cc08ac704e094ce2446a9df093b9941b90ff5f070bead94fd17f67a5acbb" exitCode=0
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.052084 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qxgn4" event={"ID":"c3b2030c-9a69-4ea0-9253-9bf9a6aee523","Type":"ContainerDied","Data":"f6c3cc08ac704e094ce2446a9df093b9941b90ff5f070bead94fd17f67a5acbb"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.052119 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qxgn4" event={"ID":"c3b2030c-9a69-4ea0-9253-9bf9a6aee523","Type":"ContainerStarted","Data":"8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.055513 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"18487b9f-9425-4d2e-ab10-e1d1974783db","Type":"ContainerStarted","Data":"ba6fdb9624eb9c250046e9470fb3c9bf209160accc702f907e561b2d1fcda6d0"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.062765 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"44e273a8-65b4-4990-ba54-bc862bcd805f","Type":"ContainerDied","Data":"4b536996c70929bd437b1d6370bb47b08462f558385e2c3be1ff29fe04100ae8"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.062808 4953 scope.go:117] "RemoveContainer" containerID="d0fdf6cd7cc8c77289c69c39349eabfed9e58c43bc36128bb46b8f85e028692d"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.063270 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.082366 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerStarted","Data":"1ae0015f4fa143363a5c595731567e0a98c5860c610dac7a56810fd10db03f30"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.107164 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.859474182 podStartE2EDuration="16.10714363s" podCreationTimestamp="2025-10-11 03:03:49 +0000 UTC" firstStartedPulling="2025-10-11 03:03:50.549959488 +0000 UTC m=+1041.483047132" lastFinishedPulling="2025-10-11 03:04:03.797628936 +0000 UTC m=+1054.730716580" observedRunningTime="2025-10-11 03:04:05.106093634 +0000 UTC m=+1056.039181278" watchObservedRunningTime="2025-10-11 03:04:05.10714363 +0000 UTC m=+1056.040231274"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.112705 4953 generic.go:334] "Generic (PLEG): container finished" podID="74c049c7-ed9a-4591-a366-8a18852a3d91" containerID="76e6f04613ca2f0a55487af8a9fc0eeda75161b1a86fd0eec89bed9bdc5711a7" exitCode=0
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.112819 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nffxt" event={"ID":"74c049c7-ed9a-4591-a366-8a18852a3d91","Type":"ContainerDied","Data":"76e6f04613ca2f0a55487af8a9fc0eeda75161b1a86fd0eec89bed9bdc5711a7"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.112862 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nffxt" event={"ID":"74c049c7-ed9a-4591-a366-8a18852a3d91","Type":"ContainerStarted","Data":"182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.157713 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.160502 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api-log" containerID="cri-o://e79aacec507f50fe57dddaa95e7df358b9a89f5af5321d51bee2b9653bd09781" gracePeriod=30
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.160868 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerStarted","Data":"ea55292d376c64d09ec7ed4ba3c80c75a401eaae4a326ffb0c25ae03acea9ca4"}
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.160944 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.160986 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api" containerID="cri-o://ea55292d376c64d09ec7ed4ba3c80c75a401eaae4a326ffb0c25ae03acea9ca4" gracePeriod=30
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.174196 4953 scope.go:117] "RemoveContainer" containerID="9d943dd49525851bfe782fdcff919982a644d7bfc6662caba1d3f265de29bfdb"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.177553 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.205548 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:05 crc kubenswrapper[4953]: E1011 03:04:05.206093 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="sg-core"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206112 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="sg-core"
Oct 11 03:04:05 crc kubenswrapper[4953]: E1011 03:04:05.206135 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-central-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206143 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-central-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: E1011 03:04:05.206151 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-notification-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206160 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-notification-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: E1011 03:04:05.206167 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="proxy-httpd"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206173 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="proxy-httpd"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206354 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-central-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206379 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="proxy-httpd"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206388 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="ceilometer-notification-agent"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.206398 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" containerName="sg-core"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.208009 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.219591 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.220045 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.234046 4953 scope.go:117] "RemoveContainer" containerID="045a90bde669a89de83783668f8e04a229cd1956647addb8619925dd9618c46b"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.242312 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.246492 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=9.246464219 podStartE2EDuration="9.246464219s" podCreationTimestamp="2025-10-11 03:03:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:04:05.223156955 +0000 UTC m=+1056.156244599" watchObservedRunningTime="2025-10-11 03:04:05.246464219 +0000 UTC m=+1056.179551863"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299639 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299693 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299732 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299756 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299774 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299963 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.299996 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlvtf\" (UniqueName: \"kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.381243 4953 scope.go:117] "RemoveContainer" containerID="b84f8e4229580d44e28569ca011f8674452829abbed32fc9456bf1b06885a58a"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401415 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401464 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlvtf\" (UniqueName: \"kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401509 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401535 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401561 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401596 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.401633 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.402002 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.406910 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.416733 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.417089 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.421982 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.424497 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlvtf\" (UniqueName: \"kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.434756 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data\") pod \"ceilometer-0\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") " pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.546055 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:05 crc kubenswrapper[4953]: I1011 03:04:05.812314 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44e273a8-65b4-4990-ba54-bc862bcd805f" path="/var/lib/kubelet/pods/44e273a8-65b4-4990-ba54-bc862bcd805f/volumes"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.000533 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:06 crc kubenswrapper[4953]: W1011 03:04:06.005934 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd7a13f0_ff44_426e_81b6_553986fd8052.slice/crio-d90b65cf601527dc9a86e82db7e3b778b6d5548efa50e2923ebe9ab4a1a9e3db WatchSource:0}: Error finding container d90b65cf601527dc9a86e82db7e3b778b6d5548efa50e2923ebe9ab4a1a9e3db: Status 404 returned error can't find the container with id d90b65cf601527dc9a86e82db7e3b778b6d5548efa50e2923ebe9ab4a1a9e3db
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.168989 4953 generic.go:334] "Generic (PLEG): container finished" podID="fb09975c-0012-4449-bc90-1838c68c97f4" containerID="e79aacec507f50fe57dddaa95e7df358b9a89f5af5321d51bee2b9653bd09781" exitCode=143
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.169083 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerDied","Data":"e79aacec507f50fe57dddaa95e7df358b9a89f5af5321d51bee2b9653bd09781"}
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.172627 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerStarted","Data":"d90b65cf601527dc9a86e82db7e3b778b6d5548efa50e2923ebe9ab4a1a9e3db"}
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.174951 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerStarted","Data":"bafd156a842f3a8f629b515f2db73a5f408ef40ff7441e1ec7e3a7d97c7995ad"}
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.447134 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=9.607637357 podStartE2EDuration="10.447100119s" podCreationTimestamp="2025-10-11 03:03:56 +0000 UTC" firstStartedPulling="2025-10-11 03:03:57.188024224 +0000 UTC m=+1048.121111868" lastFinishedPulling="2025-10-11 03:03:58.027486986 +0000 UTC m=+1048.960574630" observedRunningTime="2025-10-11 03:04:06.20758744 +0000 UTC m=+1057.140675094" watchObservedRunningTime="2025-10-11 03:04:06.447100119 +0000 UTC m=+1057.380187764"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.462224 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.515307 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.787355 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.821067 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.940384 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr9l5\" (UniqueName: \"kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5\") pod \"74c049c7-ed9a-4591-a366-8a18852a3d91\" (UID: \"74c049c7-ed9a-4591-a366-8a18852a3d91\") "
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.940571 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzk4m\" (UniqueName: \"kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m\") pod \"c3b2030c-9a69-4ea0-9253-9bf9a6aee523\" (UID: \"c3b2030c-9a69-4ea0-9253-9bf9a6aee523\") "
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.952483 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m" (OuterVolumeSpecName: "kube-api-access-bzk4m") pod "c3b2030c-9a69-4ea0-9253-9bf9a6aee523" (UID: "c3b2030c-9a69-4ea0-9253-9bf9a6aee523"). InnerVolumeSpecName "kube-api-access-bzk4m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.954592 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5" (OuterVolumeSpecName: "kube-api-access-lr9l5") pod "74c049c7-ed9a-4591-a366-8a18852a3d91" (UID: "74c049c7-ed9a-4591-a366-8a18852a3d91"). InnerVolumeSpecName "kube-api-access-lr9l5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.956251 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.959988 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzk4m\" (UniqueName: \"kubernetes.io/projected/c3b2030c-9a69-4ea0-9253-9bf9a6aee523-kube-api-access-bzk4m\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:06 crc kubenswrapper[4953]: I1011 03:04:06.960048 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr9l5\" (UniqueName: \"kubernetes.io/projected/74c049c7-ed9a-4591-a366-8a18852a3d91-kube-api-access-lr9l5\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.061441 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw9sz\" (UniqueName: \"kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz\") pod \"e88244cd-5c67-4f33-b40f-58682d29da8b\" (UID: \"e88244cd-5c67-4f33-b40f-58682d29da8b\") "
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.067918 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz" (OuterVolumeSpecName: "kube-api-access-fw9sz") pod "e88244cd-5c67-4f33-b40f-58682d29da8b" (UID: "e88244cd-5c67-4f33-b40f-58682d29da8b"). InnerVolumeSpecName "kube-api-access-fw9sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.163178 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw9sz\" (UniqueName: \"kubernetes.io/projected/e88244cd-5c67-4f33-b40f-58682d29da8b-kube-api-access-fw9sz\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.185876 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nffxt" event={"ID":"74c049c7-ed9a-4591-a366-8a18852a3d91","Type":"ContainerDied","Data":"182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76"}
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.186892 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="182f2fa1bf5d9f060d62cc390304f16d8b09c3e1a7971037be3fc19b4b43db76"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.186992 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nffxt"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.191635 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-dcjsx" event={"ID":"e88244cd-5c67-4f33-b40f-58682d29da8b","Type":"ContainerDied","Data":"57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2"}
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.191672 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-dcjsx"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.191675 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57a43ca1eea45079075571c3a8d2f7bdcacfe7fa1ba44643896ff86f93affac2"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.197845 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-qxgn4" event={"ID":"c3b2030c-9a69-4ea0-9253-9bf9a6aee523","Type":"ContainerDied","Data":"8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a"}
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.197868 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e8b5b0c5cf2471a9661112c40bdc439c7b939d2eebc2fe83b062c6f00fe781a"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.198084 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-qxgn4"
Oct 11 03:04:07 crc kubenswrapper[4953]: I1011 03:04:07.200263 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerStarted","Data":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"}
Oct 11 03:04:08 crc kubenswrapper[4953]: I1011 03:04:08.214390 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerStarted","Data":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"}
Oct 11 03:04:08 crc kubenswrapper[4953]: I1011 03:04:08.215757 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerStarted","Data":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"}
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.234383 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerStarted","Data":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"}
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.235001 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.234743 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="proxy-httpd" containerID="cri-o://7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" gracePeriod=30
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.234517 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-central-agent" containerID="cri-o://a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" gracePeriod=30
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.234758 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="sg-core" containerID="cri-o://7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" gracePeriod=30
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.234768 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-notification-agent" containerID="cri-o://0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" gracePeriod=30
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.265872 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.986712584 podStartE2EDuration="5.265846824s" podCreationTimestamp="2025-10-11 03:04:05 +0000 UTC" firstStartedPulling="2025-10-11 03:04:06.014701377 +0000 UTC m=+1056.947789021" lastFinishedPulling="2025-10-11 03:04:09.293835617 +0000 UTC m=+1060.226923261" observedRunningTime="2025-10-11 03:04:10.259198425 +0000 UTC m=+1061.192286079" watchObservedRunningTime="2025-10-11 03:04:10.265846824 +0000 UTC m=+1061.198934478"
Oct 11 03:04:10 crc kubenswrapper[4953]: I1011 03:04:10.970960 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035525 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035592 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035635 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035678 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035707 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlvtf\" (UniqueName: \"kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035726 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.035793 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data\") pod \"dd7a13f0-ff44-426e-81b6-553986fd8052\" (UID: \"dd7a13f0-ff44-426e-81b6-553986fd8052\") "
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.036317 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.036762 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.042828 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf" (OuterVolumeSpecName: "kube-api-access-mlvtf") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "kube-api-access-mlvtf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.046720 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts" (OuterVolumeSpecName: "scripts") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.069795 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.104990 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137501 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137534 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137543 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137551 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137560 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd7a13f0-ff44-426e-81b6-553986fd8052-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.137569 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlvtf\" (UniqueName: \"kubernetes.io/projected/dd7a13f0-ff44-426e-81b6-553986fd8052-kube-api-access-mlvtf\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.145557 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data" (OuterVolumeSpecName: "config-data") pod "dd7a13f0-ff44-426e-81b6-553986fd8052" (UID: "dd7a13f0-ff44-426e-81b6-553986fd8052"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.238835 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd7a13f0-ff44-426e-81b6-553986fd8052-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245078 4953 generic.go:334] "Generic (PLEG): container finished" podID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" exitCode=0
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245108 4953 generic.go:334] "Generic (PLEG): container finished" podID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" exitCode=2
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245117 4953 generic.go:334] "Generic (PLEG): container finished" podID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" exitCode=0
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245124 4953 generic.go:334] "Generic (PLEG): container finished" podID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" exitCode=0
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245126 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245147 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerDied","Data":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"}
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245186 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerDied","Data":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"}
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245199 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerDied","Data":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"}
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245221 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerDied","Data":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"}
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245234 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd7a13f0-ff44-426e-81b6-553986fd8052","Type":"ContainerDied","Data":"d90b65cf601527dc9a86e82db7e3b778b6d5548efa50e2923ebe9ab4a1a9e3db"}
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.245257 4953 scope.go:117] "RemoveContainer" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.277382 4953 scope.go:117] "RemoveContainer" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.298461 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.305627 4953 scope.go:117] "RemoveContainer" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.308798 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318190 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318541 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e88244cd-5c67-4f33-b40f-58682d29da8b" containerName="mariadb-database-create"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318557 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e88244cd-5c67-4f33-b40f-58682d29da8b" containerName="mariadb-database-create"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318575 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="sg-core"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318583 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="sg-core"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318592 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-notification-agent"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318598 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-notification-agent"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318627 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b2030c-9a69-4ea0-9253-9bf9a6aee523" containerName="mariadb-database-create"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318635 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b2030c-9a69-4ea0-9253-9bf9a6aee523" containerName="mariadb-database-create"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318649 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="proxy-httpd"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318655 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="proxy-httpd"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318662 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-central-agent"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318668 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-central-agent"
Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.318688 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74c049c7-ed9a-4591-a366-8a18852a3d91" containerName="mariadb-database-create"
Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318694 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="74c049c7-ed9a-4591-a366-8a18852a3d91" containerName="mariadb-database-create"
Oct 11
03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318886 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-notification-agent" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318905 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="74c049c7-ed9a-4591-a366-8a18852a3d91" containerName="mariadb-database-create" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318920 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3b2030c-9a69-4ea0-9253-9bf9a6aee523" containerName="mariadb-database-create" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.318931 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e88244cd-5c67-4f33-b40f-58682d29da8b" containerName="mariadb-database-create" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.319115 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="ceilometer-central-agent" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.319141 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="proxy-httpd" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.319155 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" containerName="sg-core" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.320850 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.325364 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.325915 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.346501 4953 scope.go:117] "RemoveContainer" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.358255 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.427001 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.441755 4953 scope.go:117] "RemoveContainer" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.449762 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": container with ID starting with 7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa not found: ID does not exist" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.449809 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"} err="failed to get container status \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": rpc error: code = NotFound desc = could not find container 
\"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": container with ID starting with 7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.449835 4953 scope.go:117] "RemoveContainer" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.453755 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": container with ID starting with 7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715 not found: ID does not exist" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.453794 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"} err="failed to get container status \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": rpc error: code = NotFound desc = could not find container \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": container with ID starting with 7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.453818 4953 scope.go:117] "RemoveContainer" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.455914 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.455954 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.456018 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.456040 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c29r7\" (UniqueName: \"kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.456086 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.456108 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.456135 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.457949 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": container with ID starting with 0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0 not found: ID does not exist" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.457983 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"} err="failed to get container status \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": rpc error: code = NotFound desc = could not find container \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": container with ID starting with 0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.458006 4953 scope.go:117] "RemoveContainer" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: E1011 03:04:11.464776 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": container with ID starting with a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092 not found: ID does not exist" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.464828 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"} err="failed to get container status \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": rpc error: code = NotFound desc = could not find container \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": container with ID starting with a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.464853 4953 scope.go:117] "RemoveContainer" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.471087 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"} err="failed to get container status \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": rpc error: code = NotFound desc = could not find container 
\"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": container with ID starting with 7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.471135 4953 scope.go:117] "RemoveContainer" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.471927 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"} err="failed to get container status \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": rpc error: code = NotFound desc = could not find container \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": container with ID starting with 7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.471957 4953 scope.go:117] "RemoveContainer" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.472932 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"} err="failed to get container status \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": rpc error: code = NotFound desc = could not find container \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": container with ID starting with 0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.473007 4953 scope.go:117] "RemoveContainer" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.473334 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"} err="failed to get container status \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": rpc error: code = NotFound desc = could not find container \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": container with ID starting with a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.473398 4953 scope.go:117] "RemoveContainer" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.473679 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"} err="failed to get container status \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": rpc error: code = NotFound desc = could not find container \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": container with ID starting with 7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.473746 4953 scope.go:117] "RemoveContainer" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.474075 4953 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"} err="failed to get container status \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": rpc error: code = NotFound desc = could not find container \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": container with ID starting with 7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.474100 4953 scope.go:117] "RemoveContainer" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.474383 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"} err="failed to get container status \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": rpc error: code = NotFound desc = could not find container \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": container with ID starting with 0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.474451 4953 scope.go:117] "RemoveContainer" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475111 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"} err="failed to get container status \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": rpc error: code = NotFound desc = could not find container \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": container with ID starting with a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475151 4953 scope.go:117] "RemoveContainer" containerID="7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475465 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa"} err="failed to get container status \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": rpc error: code = NotFound desc = could not find container \"7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa\": container with ID starting with 7e2a15f6392027688a8b9578cdb4b1cd2d9c4405e64bda8d1be9d62c2bd217aa not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475502 4953 scope.go:117] "RemoveContainer" containerID="7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475966 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715"} err="failed to get container status \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": rpc error: code = NotFound desc = could not find container \"7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715\": container with ID starting with 
7bd89214b2be4f880b6d47a5d522f1243951b00320921829b83802f509349715 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.475992 4953 scope.go:117] "RemoveContainer" containerID="0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.476308 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0"} err="failed to get container status \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": rpc error: code = NotFound desc = could not find container \"0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0\": container with ID starting with 0b9415408134b2c94fc29dc135d44154231d68cadb71fe218d72056ed64181d0 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.476348 4953 scope.go:117] "RemoveContainer" containerID="a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.477055 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092"} err="failed to get container status \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": rpc error: code = NotFound desc = could not find container \"a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092\": container with ID starting with a9e563dff5180403a25429c414cac9e15afd87534b7107d46c1d31bc6ad0c092 not found: ID does not exist" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.546324 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.546585 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="dnsmasq-dns" containerID="cri-o://0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2" gracePeriod=10 Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.557432 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.557480 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.557561 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.557582 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c29r7\" (UniqueName: \"kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " 
pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.562738 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.562777 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.562829 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.563496 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.565631 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.568279 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.573359 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.575901 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.579306 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.598247 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c29r7\" (UniqueName: \"kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7\") pod \"ceilometer-0\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 
03:04:11.739862 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.818126 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd7a13f0-ff44-426e-81b6-553986fd8052" path="/var/lib/kubelet/pods/dd7a13f0-ff44-426e-81b6-553986fd8052/volumes" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.917939 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 11 03:04:11 crc kubenswrapper[4953]: I1011 03:04:11.955524 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.076054 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.176310 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config\") pod \"5fe23efa-4623-415c-93ea-1bb629010c99\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.176357 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb\") pod \"5fe23efa-4623-415c-93ea-1bb629010c99\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.176466 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbr9m\" (UniqueName: \"kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m\") pod \"5fe23efa-4623-415c-93ea-1bb629010c99\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.176496 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb\") pod \"5fe23efa-4623-415c-93ea-1bb629010c99\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.176512 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc\") pod \"5fe23efa-4623-415c-93ea-1bb629010c99\" (UID: \"5fe23efa-4623-415c-93ea-1bb629010c99\") " Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.184321 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m" (OuterVolumeSpecName: "kube-api-access-qbr9m") pod "5fe23efa-4623-415c-93ea-1bb629010c99" (UID: "5fe23efa-4623-415c-93ea-1bb629010c99"). InnerVolumeSpecName "kube-api-access-qbr9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.239959 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5fe23efa-4623-415c-93ea-1bb629010c99" (UID: "5fe23efa-4623-415c-93ea-1bb629010c99"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.240559 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config" (OuterVolumeSpecName: "config") pod "5fe23efa-4623-415c-93ea-1bb629010c99" (UID: "5fe23efa-4623-415c-93ea-1bb629010c99"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.251918 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5fe23efa-4623-415c-93ea-1bb629010c99" (UID: "5fe23efa-4623-415c-93ea-1bb629010c99"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259361 4953 generic.go:334] "Generic (PLEG): container finished" podID="5fe23efa-4623-415c-93ea-1bb629010c99" containerID="0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2" exitCode=0 Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259410 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259448 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" event={"ID":"5fe23efa-4623-415c-93ea-1bb629010c99","Type":"ContainerDied","Data":"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2"} Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259500 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb684768f-s8rxg" event={"ID":"5fe23efa-4623-415c-93ea-1bb629010c99","Type":"ContainerDied","Data":"74618b6e536ef742f69432576e9f767eff0c44e286a13f2a8b16a952e8b3b1ba"} Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259524 4953 scope.go:117] "RemoveContainer" containerID="0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259865 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="cinder-scheduler" containerID="cri-o://1ae0015f4fa143363a5c595731567e0a98c5860c610dac7a56810fd10db03f30" gracePeriod=30 Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.259925 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="probe" containerID="cri-o://bafd156a842f3a8f629b515f2db73a5f408ef40ff7441e1ec7e3a7d97c7995ad" gracePeriod=30 Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.266220 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5fe23efa-4623-415c-93ea-1bb629010c99" (UID: "5fe23efa-4623-415c-93ea-1bb629010c99"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.278989 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.279023 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.279034 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbr9m\" (UniqueName: \"kubernetes.io/projected/5fe23efa-4623-415c-93ea-1bb629010c99-kube-api-access-qbr9m\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.279043 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.279050 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe23efa-4623-415c-93ea-1bb629010c99-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.288235 4953 scope.go:117] "RemoveContainer" containerID="521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.316878 4953 scope.go:117] "RemoveContainer" containerID="0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2" Oct 11 03:04:12 crc kubenswrapper[4953]: E1011 03:04:12.317417 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2\": container with ID starting with 0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2 not found: ID does not exist" containerID="0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.317471 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2"} err="failed to get container status \"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2\": rpc error: code = NotFound desc = could not find container \"0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2\": container with ID starting with 0639ad6465b9236b20313da08aba131e30a5ba77be8f45dac585098d702bd1c2 not found: ID does not exist" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.317522 4953 scope.go:117] "RemoveContainer" containerID="521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da" Oct 11 03:04:12 crc kubenswrapper[4953]: E1011 03:04:12.318034 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da\": container with ID starting with 521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da not found: ID does not exist" containerID="521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.318073 4953 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da"} err="failed to get container status \"521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da\": rpc error: code = NotFound desc = could not find container \"521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da\": container with ID starting with 521d902320eefcaa384abbb98bcc7acd1e1853a5348c18e862ee43a32afff8da not found: ID does not exist" Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.320021 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:12 crc kubenswrapper[4953]: W1011 03:04:12.323179 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77cc4c08_dd40_4ccc_b502_e4937c91acb4.slice/crio-801b61a69335ad4734cefa09f978f01671d97926e30a648b64a601a48d35c63d WatchSource:0}: Error finding container 801b61a69335ad4734cefa09f978f01671d97926e30a648b64a601a48d35c63d: Status 404 returned error can't find the container with id 801b61a69335ad4734cefa09f978f01671d97926e30a648b64a601a48d35c63d Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.606985 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:04:12 crc kubenswrapper[4953]: I1011 03:04:12.620942 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb684768f-s8rxg"] Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.267974 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerStarted","Data":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.269030 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerStarted","Data":"801b61a69335ad4734cefa09f978f01671d97926e30a648b64a601a48d35c63d"} Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.270078 4953 generic.go:334] "Generic (PLEG): container finished" podID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerID="bafd156a842f3a8f629b515f2db73a5f408ef40ff7441e1ec7e3a7d97c7995ad" exitCode=0 Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.270126 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerDied","Data":"bafd156a842f3a8f629b515f2db73a5f408ef40ff7441e1ec7e3a7d97c7995ad"} Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.814451 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" path="/var/lib/kubelet/pods/5fe23efa-4623-415c-93ea-1bb629010c99/volumes" Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.816155 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:13 crc kubenswrapper[4953]: I1011 03:04:13.901115 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 11 03:04:14 crc kubenswrapper[4953]: I1011 03:04:14.285081 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerStarted","Data":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} Oct 11 03:04:14 crc 
kubenswrapper[4953]: I1011 03:04:14.381675 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:04:15 crc kubenswrapper[4953]: I1011 03:04:15.297505 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerStarted","Data":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.319030 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-central-agent" containerID="cri-o://6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.319459 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerStarted","Data":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.323700 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.320065 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="proxy-httpd" containerID="cri-o://88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.320188 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-notification-agent" containerID="cri-o://39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.320040 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="sg-core" containerID="cri-o://42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.332452 4953 generic.go:334] "Generic (PLEG): container finished" podID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerID="1ae0015f4fa143363a5c595731567e0a98c5860c610dac7a56810fd10db03f30" exitCode=0 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.332546 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerDied","Data":"1ae0015f4fa143363a5c595731567e0a98c5860c610dac7a56810fd10db03f30"} Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.438095 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.471367 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.180740549 podStartE2EDuration="5.471347703s" podCreationTimestamp="2025-10-11 03:04:11 +0000 UTC" firstStartedPulling="2025-10-11 03:04:12.325388442 +0000 UTC m=+1063.258476096" lastFinishedPulling="2025-10-11 03:04:15.615995606 +0000 UTC m=+1066.549083250" observedRunningTime="2025-10-11 03:04:16.35305401 +0000 UTC m=+1067.286141654" watchObservedRunningTime="2025-10-11 03:04:16.471347703 +0000 UTC m=+1067.404435357" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579402 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579543 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579585 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579743 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv2qc\" (UniqueName: \"kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579794 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579956 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.579965 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle\") pod \"947f2af4-1ca5-4da4-8a24-0644845addd2\" (UID: \"947f2af4-1ca5-4da4-8a24-0644845addd2\") " Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.580406 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/947f2af4-1ca5-4da4-8a24-0644845addd2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.585811 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc" (OuterVolumeSpecName: "kube-api-access-fv2qc") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "kube-api-access-fv2qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.585921 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts" (OuterVolumeSpecName: "scripts") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.587782 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.605724 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5658f8676c-24292" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.644834 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.681987 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"] Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682217 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-ff7cc76b4-qffvt" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-api" containerID="cri-o://05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682644 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-ff7cc76b4-qffvt" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-httpd" containerID="cri-o://7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434" gracePeriod=30 Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682668 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682695 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682708 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.682721 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv2qc\" (UniqueName: \"kubernetes.io/projected/947f2af4-1ca5-4da4-8a24-0644845addd2-kube-api-access-fv2qc\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.752058 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data" (OuterVolumeSpecName: "config-data") pod "947f2af4-1ca5-4da4-8a24-0644845addd2" (UID: "947f2af4-1ca5-4da4-8a24-0644845addd2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:16 crc kubenswrapper[4953]: I1011 03:04:16.785816 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/947f2af4-1ca5-4da4-8a24-0644845addd2-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.071406 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192415 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192524 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192559 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192594 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c29r7\" (UniqueName: \"kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192758 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192795 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192879 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd\") pod \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\" (UID: \"77cc4c08-dd40-4ccc-b502-e4937c91acb4\") " Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.192875 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.193293 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.193700 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.196423 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts" (OuterVolumeSpecName: "scripts") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.198688 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7" (OuterVolumeSpecName: "kube-api-access-c29r7") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). InnerVolumeSpecName "kube-api-access-c29r7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.216353 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.274692 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.295865 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.296158 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c29r7\" (UniqueName: \"kubernetes.io/projected/77cc4c08-dd40-4ccc-b502-e4937c91acb4-kube-api-access-c29r7\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.296349 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.296360 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.296369 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/77cc4c08-dd40-4ccc-b502-e4937c91acb4-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.322936 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data" (OuterVolumeSpecName: "config-data") pod "77cc4c08-dd40-4ccc-b502-e4937c91acb4" (UID: "77cc4c08-dd40-4ccc-b502-e4937c91acb4"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345698 4953 generic.go:334] "Generic (PLEG): container finished" podID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" exitCode=0 Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345734 4953 generic.go:334] "Generic (PLEG): container finished" podID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" exitCode=2 Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345744 4953 generic.go:334] "Generic (PLEG): container finished" podID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" exitCode=0 Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345752 4953 generic.go:334] "Generic (PLEG): container finished" podID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" exitCode=0 Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345793 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerDied","Data":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345818 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerDied","Data":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345830 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerDied","Data":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345840 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerDied","Data":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345850 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"77cc4c08-dd40-4ccc-b502-e4937c91acb4","Type":"ContainerDied","Data":"801b61a69335ad4734cefa09f978f01671d97926e30a648b64a601a48d35c63d"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345866 4953 scope.go:117] "RemoveContainer" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.345976 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.350245 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"947f2af4-1ca5-4da4-8a24-0644845addd2","Type":"ContainerDied","Data":"b30322ad5a48dc33b84a75016d769842ae6ebc6ef4bd884ccbf3afad9e5ae3e2"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.350267 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.354950 4953 generic.go:334] "Generic (PLEG): container finished" podID="d7a01ec2-7204-4043-a802-21ca042eea29" containerID="7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434" exitCode=0 Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.355011 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerDied","Data":"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"} Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.365942 4953 scope.go:117] "RemoveContainer" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.381825 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.391153 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.397456 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77cc4c08-dd40-4ccc-b502-e4937c91acb4-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.404919 4953 scope.go:117] "RemoveContainer" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.407530 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.407976 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="cinder-scheduler" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408002 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="cinder-scheduler" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408023 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="dnsmasq-dns" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408030 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="dnsmasq-dns" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408215 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-central-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408223 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-central-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408247 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="init" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408255 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="init" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408273 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="probe" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408289 4953 
state_mem.go:107] "Deleted CPUSet assignment" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="probe" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408299 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="sg-core" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408305 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="sg-core" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408324 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-notification-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408333 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-notification-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.408346 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="proxy-httpd" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408355 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="proxy-httpd" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408563 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="sg-core" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408588 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-notification-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408615 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fe23efa-4623-415c-93ea-1bb629010c99" containerName="dnsmasq-dns" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408630 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="cinder-scheduler" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408639 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="proxy-httpd" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408656 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" containerName="probe" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.408667 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" containerName="ceilometer-central-agent" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.409897 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.412070 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.437631 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.438427 4953 scope.go:117] "RemoveContainer" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.448704 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.460005 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.464411 4953 scope.go:117] "RemoveContainer" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.464822 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": container with ID starting with 88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228 not found: ID does not exist" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.464870 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} err="failed to get container status \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": rpc error: code = NotFound desc = could not find container \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": container with ID starting with 88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.464908 4953 scope.go:117] "RemoveContainer" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.465159 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": container with ID starting with 42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49 not found: ID does not exist" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465197 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} err="failed to get container status \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": rpc error: code = NotFound desc = could not find container \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": container with ID starting with 42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465214 4953 scope.go:117] "RemoveContainer" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc 
kubenswrapper[4953]: E1011 03:04:17.465408 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": container with ID starting with 39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41 not found: ID does not exist" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465457 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} err="failed to get container status \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": rpc error: code = NotFound desc = could not find container \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": container with ID starting with 39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465469 4953 scope.go:117] "RemoveContainer" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: E1011 03:04:17.465643 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": container with ID starting with 6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204 not found: ID does not exist" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465658 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} err="failed to get container status \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": rpc error: code = NotFound desc = could not find container \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": container with ID starting with 6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465670 4953 scope.go:117] "RemoveContainer" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465816 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} err="failed to get container status \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": rpc error: code = NotFound desc = could not find container \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": container with ID starting with 88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465829 4953 scope.go:117] "RemoveContainer" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465979 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} err="failed to get container status 
\"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": rpc error: code = NotFound desc = could not find container \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": container with ID starting with 42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.465994 4953 scope.go:117] "RemoveContainer" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466183 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} err="failed to get container status \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": rpc error: code = NotFound desc = could not find container \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": container with ID starting with 39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466205 4953 scope.go:117] "RemoveContainer" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466359 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} err="failed to get container status \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": rpc error: code = NotFound desc = could not find container \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": container with ID starting with 6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466377 4953 scope.go:117] "RemoveContainer" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466557 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} err="failed to get container status \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": rpc error: code = NotFound desc = could not find container \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": container with ID starting with 88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466574 4953 scope.go:117] "RemoveContainer" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466793 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} err="failed to get container status \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": rpc error: code = NotFound desc = could not find container \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": container with ID starting with 42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.466815 4953 scope.go:117] "RemoveContainer" 
containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467009 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} err="failed to get container status \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": rpc error: code = NotFound desc = could not find container \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": container with ID starting with 39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467024 4953 scope.go:117] "RemoveContainer" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467204 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} err="failed to get container status \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": rpc error: code = NotFound desc = could not find container \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": container with ID starting with 6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467223 4953 scope.go:117] "RemoveContainer" containerID="88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467404 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228"} err="failed to get container status \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": rpc error: code = NotFound desc = could not find container \"88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228\": container with ID starting with 88ad766c6cf9318f7d6a174b820052bb9e31f9dd19e37f79cbf3e27517436228 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467420 4953 scope.go:117] "RemoveContainer" containerID="42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467576 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49"} err="failed to get container status \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": rpc error: code = NotFound desc = could not find container \"42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49\": container with ID starting with 42aff2367339ba6f3a7afea7701d0474dbeebda83e20cb1f5a8aadac9b3d3f49 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467589 4953 scope.go:117] "RemoveContainer" containerID="39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467754 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41"} err="failed to get container status \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": rpc error: code = NotFound desc = could not find 
container \"39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41\": container with ID starting with 39c01c9a42de048ca5d0f42927db3039bfec6a207878cb8396a3b42fac290f41 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.467767 4953 scope.go:117] "RemoveContainer" containerID="6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.468076 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204"} err="failed to get container status \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": rpc error: code = NotFound desc = could not find container \"6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204\": container with ID starting with 6ee2d67f67482ef310e164a2c517e4ad42b765e1b971012e995544f7a77a9204 not found: ID does not exist" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.468096 4953 scope.go:117] "RemoveContainer" containerID="bafd156a842f3a8f629b515f2db73a5f408ef40ff7441e1ec7e3a7d97c7995ad" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.469770 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.471690 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.479692 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.479702 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.487212 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.499982 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.500057 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fa64359f-1864-4e68-8a0e-df12bf6a204b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.500147 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-scripts\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.500196 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.500224 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.500262 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6whw\" (UniqueName: \"kubernetes.io/projected/fa64359f-1864-4e68-8a0e-df12bf6a204b-kube-api-access-t6whw\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.508092 4953 scope.go:117] "RemoveContainer" containerID="1ae0015f4fa143363a5c595731567e0a98c5860c610dac7a56810fd10db03f30" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.602072 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6whw\" (UniqueName: \"kubernetes.io/projected/fa64359f-1864-4e68-8a0e-df12bf6a204b-kube-api-access-t6whw\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.602146 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.602719 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603348 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603387 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctgnc\" (UniqueName: \"kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603417 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fa64359f-1864-4e68-8a0e-df12bf6a204b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603452 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: 
I1011 03:04:17.603512 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603562 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-scripts\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603585 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603626 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603661 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.603914 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fa64359f-1864-4e68-8a0e-df12bf6a204b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.606911 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.607308 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.607370 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-scripts\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.609199 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.609680 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa64359f-1864-4e68-8a0e-df12bf6a204b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.617200 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6whw\" (UniqueName: \"kubernetes.io/projected/fa64359f-1864-4e68-8a0e-df12bf6a204b-kube-api-access-t6whw\") pod \"cinder-scheduler-0\" (UID: \"fa64359f-1864-4e68-8a0e-df12bf6a204b\") " pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708542 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708644 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708666 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708739 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708783 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708813 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctgnc\" (UniqueName: \"kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.708848 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.709215 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd\") pod \"ceilometer-0\" 
(UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.709493 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.711725 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.711880 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.712206 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.714284 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.728067 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctgnc\" (UniqueName: \"kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc\") pod \"ceilometer-0\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") " pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.737857 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.800400 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.813566 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77cc4c08-dd40-4ccc-b502-e4937c91acb4" path="/var/lib/kubelet/pods/77cc4c08-dd40-4ccc-b502-e4937c91acb4/volumes" Oct 11 03:04:17 crc kubenswrapper[4953]: I1011 03:04:17.814308 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="947f2af4-1ca5-4da4-8a24-0644845addd2" path="/var/lib/kubelet/pods/947f2af4-1ca5-4da4-8a24-0644845addd2/volumes" Oct 11 03:04:18 crc kubenswrapper[4953]: I1011 03:04:18.222417 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 11 03:04:18 crc kubenswrapper[4953]: W1011 03:04:18.242230 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa64359f_1864_4e68_8a0e_df12bf6a204b.slice/crio-340ef68ca042ecbe911567499ebf7c2f827e9d2578612e993b294ad85d8e215c WatchSource:0}: Error finding container 340ef68ca042ecbe911567499ebf7c2f827e9d2578612e993b294ad85d8e215c: Status 404 returned error can't find the container with id 340ef68ca042ecbe911567499ebf7c2f827e9d2578612e993b294ad85d8e215c Oct 11 03:04:18 crc kubenswrapper[4953]: I1011 03:04:18.294509 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:04:18 crc kubenswrapper[4953]: W1011 03:04:18.321394 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37888789_6e04_42df_af67_132ec4e126d7.slice/crio-5f9db1ab398591cb190fc9181a10f6927da6615bd6325586ecdba05883d6d565 WatchSource:0}: Error finding container 5f9db1ab398591cb190fc9181a10f6927da6615bd6325586ecdba05883d6d565: Status 404 returned error can't find the container with id 5f9db1ab398591cb190fc9181a10f6927da6615bd6325586ecdba05883d6d565 Oct 11 03:04:18 crc kubenswrapper[4953]: I1011 03:04:18.371906 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fa64359f-1864-4e68-8a0e-df12bf6a204b","Type":"ContainerStarted","Data":"340ef68ca042ecbe911567499ebf7c2f827e9d2578612e993b294ad85d8e215c"} Oct 11 03:04:18 crc kubenswrapper[4953]: I1011 03:04:18.373332 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerStarted","Data":"5f9db1ab398591cb190fc9181a10f6927da6615bd6325586ecdba05883d6d565"} Oct 11 03:04:19 crc kubenswrapper[4953]: I1011 03:04:19.387401 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerStarted","Data":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"} Oct 11 03:04:19 crc kubenswrapper[4953]: I1011 03:04:19.388822 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fa64359f-1864-4e68-8a0e-df12bf6a204b","Type":"ContainerStarted","Data":"d17c62ce930a62fbac5ab93a3da6811509b775592a66f593fba0d1aa6b1ca0c0"} Oct 11 03:04:20 crc kubenswrapper[4953]: I1011 03:04:20.398444 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerStarted","Data":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"} Oct 11 03:04:20 crc kubenswrapper[4953]: I1011 03:04:20.402739 4953 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fa64359f-1864-4e68-8a0e-df12bf6a204b","Type":"ContainerStarted","Data":"2657586f966b49fc89db3af41ff5f9df95e95d8de09b63822275b80d606d09ce"} Oct 11 03:04:20 crc kubenswrapper[4953]: I1011 03:04:20.425036 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.425022906 podStartE2EDuration="3.425022906s" podCreationTimestamp="2025-10-11 03:04:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:04:20.421380773 +0000 UTC m=+1071.354468417" watchObservedRunningTime="2025-10-11 03:04:20.425022906 +0000 UTC m=+1071.358110550" Oct 11 03:04:21 crc kubenswrapper[4953]: I1011 03:04:21.935426 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4f27-account-create-x95m2"] Oct 11 03:04:21 crc kubenswrapper[4953]: I1011 03:04:21.936713 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4f27-account-create-x95m2" Oct 11 03:04:21 crc kubenswrapper[4953]: I1011 03:04:21.941579 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 11 03:04:21 crc kubenswrapper[4953]: I1011 03:04:21.949021 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4f27-account-create-x95m2"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.108881 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-982b7\" (UniqueName: \"kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7\") pod \"nova-api-4f27-account-create-x95m2\" (UID: \"105a7128-d0fb-4e01-a86a-d9305eedd58b\") " pod="openstack/nova-api-4f27-account-create-x95m2" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.167539 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-896b-account-create-cfp9q"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.168856 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-896b-account-create-cfp9q" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.170487 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.178377 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-896b-account-create-cfp9q"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.210631 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-982b7\" (UniqueName: \"kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7\") pod \"nova-api-4f27-account-create-x95m2\" (UID: \"105a7128-d0fb-4e01-a86a-d9305eedd58b\") " pod="openstack/nova-api-4f27-account-create-x95m2" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.227056 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-982b7\" (UniqueName: \"kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7\") pod \"nova-api-4f27-account-create-x95m2\" (UID: \"105a7128-d0fb-4e01-a86a-d9305eedd58b\") " pod="openstack/nova-api-4f27-account-create-x95m2" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.274915 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-51f7-account-create-t84pj"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.277479 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-51f7-account-create-t84pj" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.285232 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.298654 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-51f7-account-create-t84pj"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.299122 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4f27-account-create-x95m2" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.321213 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v58s2\" (UniqueName: \"kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2\") pod \"nova-cell0-896b-account-create-cfp9q\" (UID: \"9c547aa6-ce47-47ee-a11d-85f588aab4d3\") " pod="openstack/nova-cell0-896b-account-create-cfp9q" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.428216 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerStarted","Data":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"} Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.441909 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v58s2\" (UniqueName: \"kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2\") pod \"nova-cell0-896b-account-create-cfp9q\" (UID: \"9c547aa6-ce47-47ee-a11d-85f588aab4d3\") " pod="openstack/nova-cell0-896b-account-create-cfp9q" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.442049 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcb75\" (UniqueName: \"kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75\") pod \"nova-cell1-51f7-account-create-t84pj\" (UID: \"e8590eb8-db8f-45b8-9917-4bfed4ebec04\") " pod="openstack/nova-cell1-51f7-account-create-t84pj" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.471839 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v58s2\" (UniqueName: \"kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2\") pod \"nova-cell0-896b-account-create-cfp9q\" (UID: \"9c547aa6-ce47-47ee-a11d-85f588aab4d3\") " pod="openstack/nova-cell0-896b-account-create-cfp9q" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.491368 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-896b-account-create-cfp9q" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.545005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcb75\" (UniqueName: \"kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75\") pod \"nova-cell1-51f7-account-create-t84pj\" (UID: \"e8590eb8-db8f-45b8-9917-4bfed4ebec04\") " pod="openstack/nova-cell1-51f7-account-create-t84pj" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.560982 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcb75\" (UniqueName: \"kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75\") pod \"nova-cell1-51f7-account-create-t84pj\" (UID: \"e8590eb8-db8f-45b8-9917-4bfed4ebec04\") " pod="openstack/nova-cell1-51f7-account-create-t84pj" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.696383 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-51f7-account-create-t84pj" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.738936 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.763620 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4f27-account-create-x95m2"] Oct 11 03:04:22 crc kubenswrapper[4953]: I1011 03:04:22.920025 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-896b-account-create-cfp9q"] Oct 11 03:04:23 crc kubenswrapper[4953]: W1011 03:04:23.132548 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8590eb8_db8f_45b8_9917_4bfed4ebec04.slice/crio-eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9 WatchSource:0}: Error finding container eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9: Status 404 returned error can't find the container with id eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9 Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.163352 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-51f7-account-create-t84pj"] Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.305810 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ff7cc76b4-qffvt" Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.440413 4953 generic.go:334] "Generic (PLEG): container finished" podID="d7a01ec2-7204-4043-a802-21ca042eea29" containerID="05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d" exitCode=0 Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.440465 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ff7cc76b4-qffvt"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.440476 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerDied","Data":"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.440500 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ff7cc76b4-qffvt" event={"ID":"d7a01ec2-7204-4043-a802-21ca042eea29","Type":"ContainerDied","Data":"ae181d45c4cd11f8c04fa12d535f57634d7c2e3ea7dd7aded4da5ce99da52672"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.440515 4953 scope.go:117] "RemoveContainer" containerID="7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.442670 4953 generic.go:334] "Generic (PLEG): container finished" podID="105a7128-d0fb-4e01-a86a-d9305eedd58b" containerID="e7fd23c4fbb9377d9b6440704658cf9ae063f20bfa63b936b1e69d01670f2b44" exitCode=0
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.442748 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4f27-account-create-x95m2" event={"ID":"105a7128-d0fb-4e01-a86a-d9305eedd58b","Type":"ContainerDied","Data":"e7fd23c4fbb9377d9b6440704658cf9ae063f20bfa63b936b1e69d01670f2b44"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.442771 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4f27-account-create-x95m2" event={"ID":"105a7128-d0fb-4e01-a86a-d9305eedd58b","Type":"ContainerStarted","Data":"bffdd30de8df0d4ff656221813808f2b748dfbd1044b9faf8ee10fd938e67a23"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.448732 4953 generic.go:334] "Generic (PLEG): container finished" podID="9c547aa6-ce47-47ee-a11d-85f588aab4d3" containerID="58e34fa26fc115520c322fe73fa97e032b15f5bca52328e61bfa8306f6362364" exitCode=0
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.448867 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-896b-account-create-cfp9q" event={"ID":"9c547aa6-ce47-47ee-a11d-85f588aab4d3","Type":"ContainerDied","Data":"58e34fa26fc115520c322fe73fa97e032b15f5bca52328e61bfa8306f6362364"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.448897 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-896b-account-create-cfp9q" event={"ID":"9c547aa6-ce47-47ee-a11d-85f588aab4d3","Type":"ContainerStarted","Data":"d6a8d46fe3d49b788fc9991ce142ef99117699e2cbba08b2ca203f4527c24f05"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.450548 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-51f7-account-create-t84pj" event={"ID":"e8590eb8-db8f-45b8-9917-4bfed4ebec04","Type":"ContainerStarted","Data":"b78d5429605377d8a93204dd0cf0ddbffb46ac8f11b4155f41cb9d306478c3d4"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.450650 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-51f7-account-create-t84pj" event={"ID":"e8590eb8-db8f-45b8-9917-4bfed4ebec04","Type":"ContainerStarted","Data":"eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9"}
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.461034 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config\") pod \"d7a01ec2-7204-4043-a802-21ca042eea29\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") "
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.461125 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhcqq\" (UniqueName: \"kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq\") pod \"d7a01ec2-7204-4043-a802-21ca042eea29\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") "
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.461225 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs\") pod \"d7a01ec2-7204-4043-a802-21ca042eea29\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") "
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.461322 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config\") pod \"d7a01ec2-7204-4043-a802-21ca042eea29\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") "
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.461597 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle\") pod \"d7a01ec2-7204-4043-a802-21ca042eea29\" (UID: \"d7a01ec2-7204-4043-a802-21ca042eea29\") "
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.468165 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d7a01ec2-7204-4043-a802-21ca042eea29" (UID: "d7a01ec2-7204-4043-a802-21ca042eea29"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.472165 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq" (OuterVolumeSpecName: "kube-api-access-dhcqq") pod "d7a01ec2-7204-4043-a802-21ca042eea29" (UID: "d7a01ec2-7204-4043-a802-21ca042eea29"). InnerVolumeSpecName "kube-api-access-dhcqq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.510185 4953 scope.go:117] "RemoveContainer" containerID="05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.526347 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config" (OuterVolumeSpecName: "config") pod "d7a01ec2-7204-4043-a802-21ca042eea29" (UID: "d7a01ec2-7204-4043-a802-21ca042eea29"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.538424 4953 scope.go:117] "RemoveContainer" containerID="7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"
Oct 11 03:04:23 crc kubenswrapper[4953]: E1011 03:04:23.538811 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434\": container with ID starting with 7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434 not found: ID does not exist" containerID="7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.538850 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434"} err="failed to get container status \"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434\": rpc error: code = NotFound desc = could not find container \"7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434\": container with ID starting with 7f154c4beff058908bb532ce7666410f0e9cd371018772ec5c04767c8002f434 not found: ID does not exist"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.538875 4953 scope.go:117] "RemoveContainer" containerID="05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"
Oct 11 03:04:23 crc kubenswrapper[4953]: E1011 03:04:23.539167 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d\": container with ID starting with 05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d not found: ID does not exist" containerID="05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.539195 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d"} err="failed to get container status \"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d\": rpc error: code = NotFound desc = could not find container \"05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d\": container with ID starting with 05ca924c3593a2c5f6c0e54de5a59ea9c6acba05d6a303cffe69e729cb98b35d not found: ID does not exist"
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.555135 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7a01ec2-7204-4043-a802-21ca042eea29" (UID: "d7a01ec2-7204-4043-a802-21ca042eea29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.563888 4953 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-httpd-config\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.563954 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhcqq\" (UniqueName: \"kubernetes.io/projected/d7a01ec2-7204-4043-a802-21ca042eea29-kube-api-access-dhcqq\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.563967 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-config\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.564004 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.572189 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "d7a01ec2-7204-4043-a802-21ca042eea29" (UID: "d7a01ec2-7204-4043-a802-21ca042eea29"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.665743 4953 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a01ec2-7204-4043-a802-21ca042eea29-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.787872 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"]
Oct 11 03:04:23 crc kubenswrapper[4953]: I1011 03:04:23.820448 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ff7cc76b4-qffvt"]
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.468163 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerStarted","Data":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"}
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.468365 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.471822 4953 generic.go:334] "Generic (PLEG): container finished" podID="e8590eb8-db8f-45b8-9917-4bfed4ebec04" containerID="b78d5429605377d8a93204dd0cf0ddbffb46ac8f11b4155f41cb9d306478c3d4" exitCode=0
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.471996 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-51f7-account-create-t84pj" event={"ID":"e8590eb8-db8f-45b8-9917-4bfed4ebec04","Type":"ContainerDied","Data":"b78d5429605377d8a93204dd0cf0ddbffb46ac8f11b4155f41cb9d306478c3d4"}
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.502818 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.238631786 podStartE2EDuration="7.502797909s" podCreationTimestamp="2025-10-11 03:04:17 +0000 UTC" firstStartedPulling="2025-10-11 03:04:18.323658332 +0000 UTC m=+1069.256745976" lastFinishedPulling="2025-10-11 03:04:23.587824415 +0000 UTC m=+1074.520912099" observedRunningTime="2025-10-11 03:04:24.495253967 +0000 UTC m=+1075.428341621" watchObservedRunningTime="2025-10-11 03:04:24.502797909 +0000 UTC m=+1075.435885563"
Oct 11 03:04:24 crc kubenswrapper[4953]: I1011 03:04:24.921010 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-51f7-account-create-t84pj"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.007739 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcb75\" (UniqueName: \"kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75\") pod \"e8590eb8-db8f-45b8-9917-4bfed4ebec04\" (UID: \"e8590eb8-db8f-45b8-9917-4bfed4ebec04\") "
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.015312 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75" (OuterVolumeSpecName: "kube-api-access-xcb75") pod "e8590eb8-db8f-45b8-9917-4bfed4ebec04" (UID: "e8590eb8-db8f-45b8-9917-4bfed4ebec04"). InnerVolumeSpecName "kube-api-access-xcb75". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.080290 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-896b-account-create-cfp9q"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.085299 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4f27-account-create-x95m2"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.111663 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-982b7\" (UniqueName: \"kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7\") pod \"105a7128-d0fb-4e01-a86a-d9305eedd58b\" (UID: \"105a7128-d0fb-4e01-a86a-d9305eedd58b\") "
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.111753 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v58s2\" (UniqueName: \"kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2\") pod \"9c547aa6-ce47-47ee-a11d-85f588aab4d3\" (UID: \"9c547aa6-ce47-47ee-a11d-85f588aab4d3\") "
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.112529 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcb75\" (UniqueName: \"kubernetes.io/projected/e8590eb8-db8f-45b8-9917-4bfed4ebec04-kube-api-access-xcb75\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.117071 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7" (OuterVolumeSpecName: "kube-api-access-982b7") pod "105a7128-d0fb-4e01-a86a-d9305eedd58b" (UID: "105a7128-d0fb-4e01-a86a-d9305eedd58b"). InnerVolumeSpecName "kube-api-access-982b7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.129839 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2" (OuterVolumeSpecName: "kube-api-access-v58s2") pod "9c547aa6-ce47-47ee-a11d-85f588aab4d3" (UID: "9c547aa6-ce47-47ee-a11d-85f588aab4d3"). InnerVolumeSpecName "kube-api-access-v58s2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.214111 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-982b7\" (UniqueName: \"kubernetes.io/projected/105a7128-d0fb-4e01-a86a-d9305eedd58b-kube-api-access-982b7\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.214158 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v58s2\" (UniqueName: \"kubernetes.io/projected/9c547aa6-ce47-47ee-a11d-85f588aab4d3-kube-api-access-v58s2\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.482187 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4f27-account-create-x95m2" event={"ID":"105a7128-d0fb-4e01-a86a-d9305eedd58b","Type":"ContainerDied","Data":"bffdd30de8df0d4ff656221813808f2b748dfbd1044b9faf8ee10fd938e67a23"}
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.482564 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bffdd30de8df0d4ff656221813808f2b748dfbd1044b9faf8ee10fd938e67a23"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.482661 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4f27-account-create-x95m2"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.494490 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-896b-account-create-cfp9q"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.494492 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-896b-account-create-cfp9q" event={"ID":"9c547aa6-ce47-47ee-a11d-85f588aab4d3","Type":"ContainerDied","Data":"d6a8d46fe3d49b788fc9991ce142ef99117699e2cbba08b2ca203f4527c24f05"}
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.494675 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6a8d46fe3d49b788fc9991ce142ef99117699e2cbba08b2ca203f4527c24f05"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.496112 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-51f7-account-create-t84pj" event={"ID":"e8590eb8-db8f-45b8-9917-4bfed4ebec04","Type":"ContainerDied","Data":"eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9"}
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.496135 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-51f7-account-create-t84pj"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.496192 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb277c92d576a9d8f460bbfbdad86eb05cf1a3c955cbd3b4cf52da777b38e4f9"
Oct 11 03:04:25 crc kubenswrapper[4953]: I1011 03:04:25.807792 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" path="/var/lib/kubelet/pods/d7a01ec2-7204-4043-a802-21ca042eea29/volumes"
Oct 11 03:04:26 crc kubenswrapper[4953]: I1011 03:04:26.575382 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:26 crc kubenswrapper[4953]: I1011 03:04:26.576259 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-central-agent" containerID="cri-o://dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7" gracePeriod=30
Oct 11 03:04:26 crc kubenswrapper[4953]: I1011 03:04:26.576366 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-notification-agent" containerID="cri-o://80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660" gracePeriod=30
Oct 11 03:04:26 crc kubenswrapper[4953]: I1011 03:04:26.576285 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="proxy-httpd" containerID="cri-o://e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070" gracePeriod=30
Oct 11 03:04:26 crc kubenswrapper[4953]: I1011 03:04:26.576266 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="sg-core" containerID="cri-o://8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce" gracePeriod=30
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.397589 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460732 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460830 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460890 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460929 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460972 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctgnc\" (UniqueName: \"kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.460993 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.461016 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd\") pod \"37888789-6e04-42df-af67-132ec4e126d7\" (UID: \"37888789-6e04-42df-af67-132ec4e126d7\") "
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.461792 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.464269 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.481593 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts" (OuterVolumeSpecName: "scripts") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.486117 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr9q7"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.487760 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc" (OuterVolumeSpecName: "kube-api-access-ctgnc") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "kube-api-access-ctgnc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492434 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c547aa6-ce47-47ee-a11d-85f588aab4d3" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492464 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c547aa6-ce47-47ee-a11d-85f588aab4d3" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492476 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-central-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492482 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-central-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492498 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-api"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492506 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-api"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492513 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="105a7128-d0fb-4e01-a86a-d9305eedd58b" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492518 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="105a7128-d0fb-4e01-a86a-d9305eedd58b" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492530 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-notification-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492536 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-notification-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492546 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="proxy-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492552 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="proxy-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492566 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8590eb8-db8f-45b8-9917-4bfed4ebec04" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492572 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8590eb8-db8f-45b8-9917-4bfed4ebec04" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492588 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="sg-core"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492593 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="sg-core"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.492619 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492626 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492786 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="proxy-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492803 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="sg-core"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492811 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="105a7128-d0fb-4e01-a86a-d9305eedd58b" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492823 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-httpd"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492856 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c547aa6-ce47-47ee-a11d-85f588aab4d3" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492867 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8590eb8-db8f-45b8-9917-4bfed4ebec04" containerName="mariadb-account-create"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492877 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-notification-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492884 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="37888789-6e04-42df-af67-132ec4e126d7" containerName="ceilometer-central-agent"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.492894 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a01ec2-7204-4043-a802-21ca042eea29" containerName="neutron-api"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.493419 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.497261 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.497413 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4vc86"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.497656 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.500629 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr9q7"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.510916 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519700 4953 generic.go:334] "Generic (PLEG): container finished" podID="37888789-6e04-42df-af67-132ec4e126d7" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070" exitCode=0
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519725 4953 generic.go:334] "Generic (PLEG): container finished" podID="37888789-6e04-42df-af67-132ec4e126d7" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce" exitCode=2
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519732 4953 generic.go:334] "Generic (PLEG): container finished" podID="37888789-6e04-42df-af67-132ec4e126d7" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660" exitCode=0
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519740 4953 generic.go:334] "Generic (PLEG): container finished" podID="37888789-6e04-42df-af67-132ec4e126d7" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7" exitCode=0
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519759 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerDied","Data":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"}
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519783 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerDied","Data":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"}
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519795 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerDied","Data":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"}
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519804 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerDied","Data":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"}
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519812 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"37888789-6e04-42df-af67-132ec4e126d7","Type":"ContainerDied","Data":"5f9db1ab398591cb190fc9181a10f6927da6615bd6325586ecdba05883d6d565"}
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.519827 4953 scope.go:117] "RemoveContainer" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.520150 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.552617 4953 scope.go:117] "RemoveContainer" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.560771 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562472 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562569 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562625 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562665 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqkn5\" (UniqueName: \"kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562724 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562740 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562749 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctgnc\" (UniqueName: \"kubernetes.io/projected/37888789-6e04-42df-af67-132ec4e126d7-kube-api-access-ctgnc\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562757 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562765 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/37888789-6e04-42df-af67-132ec4e126d7-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.562777 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.572419 4953 scope.go:117] "RemoveContainer" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.591242 4953 scope.go:117] "RemoveContainer" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.592103 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data" (OuterVolumeSpecName: "config-data") pod "37888789-6e04-42df-af67-132ec4e126d7" (UID: "37888789-6e04-42df-af67-132ec4e126d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.615307 4953 scope.go:117] "RemoveContainer" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.617894 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": container with ID starting with e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070 not found: ID does not exist" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.617953 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"} err="failed to get container status \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": rpc error: code = NotFound desc = could not find container \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": container with ID starting with e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.617976 4953 scope.go:117] "RemoveContainer" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.618527 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": container with ID starting with 8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce not found: ID does not exist" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.618556 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"} err="failed to get container status \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": rpc error: code = NotFound desc = could not find container \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": container with ID starting with 8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.618570 4953 scope.go:117] "RemoveContainer" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.618953 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": container with ID starting with 80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660 not found: ID does not exist" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.618968 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"} err="failed to get container status \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": rpc error: code = NotFound desc = could not find container \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": container with ID starting with 80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.618980 4953 scope.go:117] "RemoveContainer" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: E1011 03:04:27.619295 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": container with ID starting with dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7 not found: ID does not exist" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.619311 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"} err="failed to get container status \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": rpc error: code = NotFound desc = could not find container \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": container with ID starting with dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.619322 4953 scope.go:117] "RemoveContainer" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.620021 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"} err="failed to get container status \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": rpc error: code = NotFound desc = could not find container \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": container with ID starting with e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.620074 4953 scope.go:117] "RemoveContainer" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.620960 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"} err="failed to get container status \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": rpc error: code = NotFound desc = could not find container \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": container with ID starting with 8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.620983 4953 scope.go:117] "RemoveContainer" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.621505 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"} err="failed to get container status \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": rpc error: code = NotFound desc = could not find container \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": container with ID starting with 80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.621524 4953 scope.go:117] "RemoveContainer" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.621875 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"} err="failed to get container status \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": rpc error: code = NotFound desc = could not find container \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": container with ID starting with dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.621893 4953 scope.go:117] "RemoveContainer" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622084 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"} err="failed to get container status \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": rpc error: code = NotFound desc = could not find container \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": container with ID starting with e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622107 4953 scope.go:117] "RemoveContainer" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622281 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"} err="failed to get container status \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": rpc error: code = NotFound desc = could not find container \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": container with ID starting with 8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622306 4953 scope.go:117] "RemoveContainer" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622581 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"} err="failed to get container status \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": rpc error: code = NotFound desc = could not find container \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": container with ID starting with 80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.622628 4953 scope.go:117] "RemoveContainer" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.623245 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"} err="failed to get container status \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": rpc error: code = NotFound desc = could not find container \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": container with ID starting with dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.623265 4953 scope.go:117] "RemoveContainer" containerID="e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.623715 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070"} err="failed to get container status \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": rpc error: code = NotFound desc = could not find container \"e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070\": container with ID starting with e8b854b47729851806bd7bffffa900321aea9925ca6fce7e02c6f0c219b41070 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.623734 4953 scope.go:117] "RemoveContainer" containerID="8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.623981 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce"} err="failed to get container status \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": rpc error: code = NotFound desc = could not find container \"8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce\": container with ID starting with 8cd53b0205bbae771c473264d8b92d9853b4bddb6a4d7fb8e9c825bf1e85b3ce not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.624001 4953 scope.go:117] "RemoveContainer" containerID="80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.626703 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660"} err="failed to get container status \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": rpc error: code = NotFound desc = could not find container \"80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660\": container with ID starting with 80ec4b4e612ecb3bc4fdfaebc13ca9b60d65e51f2b9760a2556673243ede0660 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.626733 4953 scope.go:117] "RemoveContainer" containerID="dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.626995 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7"} err="failed to get container status \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": rpc error: code = NotFound desc = could not find container \"dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7\": container with ID starting with dc1b12dc8d962e5836454e9e56b945b4d3b49fe1d2a081a40abca2b8fddc9cf7 not found: ID does not exist"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.663701 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.664541 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqkn5\" (UniqueName: \"kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.664688 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.665258 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.665348 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37888789-6e04-42df-af67-132ec4e126d7-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.668402 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.669635 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.670406 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.679209 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqkn5\" (UniqueName: \"kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5\") pod \"nova-cell0-conductor-db-sync-nr9q7\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") " pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.820209 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.854673 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.865597 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.883695 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.886176 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.889248 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.889482 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.891491 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.970804 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.970844 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.970869 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.970937 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh47s\" (UniqueName: \"kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.970990 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.971015 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.971054 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:27 crc kubenswrapper[4953]: I1011 03:04:27.971345 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.072469 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh47s\" (UniqueName: \"kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.072879 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.072919 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.072983 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.073029 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.073052 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.073073 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.073591 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.073965 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.091141 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.091166 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.091469 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.094027 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh47s\" (UniqueName: \"kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.108006 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.258196 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.396050 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr9q7"]
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.535775 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr9q7" event={"ID":"e3af0283-8529-49e1-ad8a-984617204bed","Type":"ContainerStarted","Data":"89cf2bd3d346f8233a9f0d43c5ebce96b76f1af29408a6c5760d2693441ac793"}
Oct 11 03:04:28 crc kubenswrapper[4953]: I1011 03:04:28.726724 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:28 crc kubenswrapper[4953]: W1011 03:04:28.727087 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0fa5b3f_31b1_44fb_b3a1_835b7a971e81.slice/crio-1631f64e9492ffa3bf93804184116ecbd89099c4a5bb4e203115a2181f961541 WatchSource:0}: Error finding container 1631f64e9492ffa3bf93804184116ecbd89099c4a5bb4e203115a2181f961541: Status 404 returned error can't find the container with id 1631f64e9492ffa3bf93804184116ecbd89099c4a5bb4e203115a2181f961541
Oct 11 03:04:29 crc kubenswrapper[4953]: I1011 03:04:29.550864 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerStarted","Data":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"}
Oct 11 03:04:29 crc kubenswrapper[4953]: I1011 03:04:29.550915 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerStarted","Data":"1631f64e9492ffa3bf93804184116ecbd89099c4a5bb4e203115a2181f961541"}
Oct 11 03:04:29 crc kubenswrapper[4953]: I1011 03:04:29.688754 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:29 crc kubenswrapper[4953]: I1011 03:04:29.815930 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37888789-6e04-42df-af67-132ec4e126d7" path="/var/lib/kubelet/pods/37888789-6e04-42df-af67-132ec4e126d7/volumes"
Oct 11 03:04:30 crc kubenswrapper[4953]: I1011 03:04:30.564180 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerStarted","Data":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"}
Oct 11 03:04:31 crc kubenswrapper[4953]: I1011 03:04:31.576066 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerStarted","Data":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"}
Oct 11 03:04:35 crc kubenswrapper[4953]: I1011 03:04:35.622165 4953 generic.go:334] "Generic (PLEG): container finished" podID="fb09975c-0012-4449-bc90-1838c68c97f4" containerID="ea55292d376c64d09ec7ed4ba3c80c75a401eaae4a326ffb0c25ae03acea9ca4" exitCode=137
Oct 11 03:04:35 crc kubenswrapper[4953]: I1011 03:04:35.622250 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerDied","Data":"ea55292d376c64d09ec7ed4ba3c80c75a401eaae4a326ffb0c25ae03acea9ca4"}
Oct 11 03:04:36 crc kubenswrapper[4953]: I1011 03:04:36.664241 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.155:8776/healthcheck\": dial tcp 10.217.0.155:8776: connect: connection refused"
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.123055 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136118 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") "
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136162 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") "
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136194 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") "
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136218 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckrvh\" (UniqueName: \"kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") "
Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136255 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") "
Oct 11 03:04:37 crc
kubenswrapper[4953]: I1011 03:04:37.136295 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.136337 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts\") pod \"fb09975c-0012-4449-bc90-1838c68c97f4\" (UID: \"fb09975c-0012-4449-bc90-1838c68c97f4\") " Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.137147 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs" (OuterVolumeSpecName: "logs") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.137228 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.142122 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh" (OuterVolumeSpecName: "kube-api-access-ckrvh") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "kube-api-access-ckrvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.143592 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.170977 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts" (OuterVolumeSpecName: "scripts") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.235721 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237492 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckrvh\" (UniqueName: \"kubernetes.io/projected/fb09975c-0012-4449-bc90-1838c68c97f4-kube-api-access-ckrvh\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237595 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237778 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237832 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237893 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb09975c-0012-4449-bc90-1838c68c97f4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.237947 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb09975c-0012-4449-bc90-1838c68c97f4-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.245813 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data" (OuterVolumeSpecName: "config-data") pod "fb09975c-0012-4449-bc90-1838c68c97f4" (UID: "fb09975c-0012-4449-bc90-1838c68c97f4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.339138 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb09975c-0012-4449-bc90-1838c68c97f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.649106 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr9q7" event={"ID":"e3af0283-8529-49e1-ad8a-984617204bed","Type":"ContainerStarted","Data":"8ed69d6ce145394755f7621e0905e63cf33d227d43b073cef013912d3ad0747d"} Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652269 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerStarted","Data":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652381 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-central-agent" containerID="cri-o://571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" gracePeriod=30 Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652426 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652479 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="proxy-httpd" containerID="cri-o://965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" gracePeriod=30 Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652514 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-notification-agent" containerID="cri-o://a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" gracePeriod=30 Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.652817 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="sg-core" containerID="cri-o://438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" gracePeriod=30 Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.662441 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"fb09975c-0012-4449-bc90-1838c68c97f4","Type":"ContainerDied","Data":"b29dd5031a1132f5fc11acc92e32cc0a5a6c5a89790dae159a153c9d4063e8d3"} Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.662498 4953 scope.go:117] "RemoveContainer" containerID="ea55292d376c64d09ec7ed4ba3c80c75a401eaae4a326ffb0c25ae03acea9ca4" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.662568 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.674670 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-nr9q7" podStartSLOduration=2.150204663 podStartE2EDuration="10.674645727s" podCreationTimestamp="2025-10-11 03:04:27 +0000 UTC" firstStartedPulling="2025-10-11 03:04:28.41124138 +0000 UTC m=+1079.344329014" lastFinishedPulling="2025-10-11 03:04:36.935682434 +0000 UTC m=+1087.868770078" observedRunningTime="2025-10-11 03:04:37.668592102 +0000 UTC m=+1088.601679766" watchObservedRunningTime="2025-10-11 03:04:37.674645727 +0000 UTC m=+1088.607733381" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.697326 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.492430369 podStartE2EDuration="10.697304904s" podCreationTimestamp="2025-10-11 03:04:27 +0000 UTC" firstStartedPulling="2025-10-11 03:04:28.729088266 +0000 UTC m=+1079.662175910" lastFinishedPulling="2025-10-11 03:04:36.933962791 +0000 UTC m=+1087.867050445" observedRunningTime="2025-10-11 03:04:37.689439733 +0000 UTC m=+1088.622527387" watchObservedRunningTime="2025-10-11 03:04:37.697304904 +0000 UTC m=+1088.630392548" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.710450 4953 scope.go:117] "RemoveContainer" containerID="e79aacec507f50fe57dddaa95e7df358b9a89f5af5321d51bee2b9653bd09781" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.727276 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.749090 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.762285 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:04:37 crc kubenswrapper[4953]: E1011 03:04:37.762731 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.762751 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api" Oct 11 03:04:37 crc kubenswrapper[4953]: E1011 03:04:37.762765 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api-log" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.762773 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api-log" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.762961 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api-log" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.762984 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" containerName="cinder-api" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.764018 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.779387 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.779962 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.780323 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.786735 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.812723 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb09975c-0012-4449-bc90-1838c68c97f4" path="/var/lib/kubelet/pods/fb09975c-0012-4449-bc90-1838c68c97f4/volumes" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948010 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948373 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c66ca321-9e90-4d55-a75f-a9cb60949914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948503 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66ca321-9e90-4d55-a75f-a9cb60949914-logs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948619 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-scripts\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948701 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqlqt\" (UniqueName: \"kubernetes.io/projected/c66ca321-9e90-4d55-a75f-a9cb60949914-kube-api-access-fqlqt\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948860 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " 
pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.948964 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:37 crc kubenswrapper[4953]: I1011 03:04:37.949070 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data-custom\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.051996 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c66ca321-9e90-4d55-a75f-a9cb60949914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052143 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66ca321-9e90-4d55-a75f-a9cb60949914-logs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052179 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-scripts\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052220 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqlqt\" (UniqueName: \"kubernetes.io/projected/c66ca321-9e90-4d55-a75f-a9cb60949914-kube-api-access-fqlqt\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052250 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052303 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052345 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052415 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.052484 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.054156 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c66ca321-9e90-4d55-a75f-a9cb60949914-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.054893 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c66ca321-9e90-4d55-a75f-a9cb60949914-logs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.061319 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.062495 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.063006 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-public-tls-certs\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.068200 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.069556 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-config-data-custom\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.073272 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c66ca321-9e90-4d55-a75f-a9cb60949914-scripts\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.075179 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqlqt\" (UniqueName: \"kubernetes.io/projected/c66ca321-9e90-4d55-a75f-a9cb60949914-kube-api-access-fqlqt\") pod \"cinder-api-0\" (UID: \"c66ca321-9e90-4d55-a75f-a9cb60949914\") " 
pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.110761 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.588936 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 11 03:04:38 crc kubenswrapper[4953]: W1011 03:04:38.594013 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc66ca321_9e90_4d55_a75f_a9cb60949914.slice/crio-ebccfa9ef775962d6bb82800af7bf73f10fa0a32916e6d90c7a8e0f5823677aa WatchSource:0}: Error finding container ebccfa9ef775962d6bb82800af7bf73f10fa0a32916e6d90c7a8e0f5823677aa: Status 404 returned error can't find the container with id ebccfa9ef775962d6bb82800af7bf73f10fa0a32916e6d90c7a8e0f5823677aa Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.604193 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665383 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665448 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665485 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665508 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665525 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.666097 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.666116 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.665539 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.666221 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qh47s\" (UniqueName: \"kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s\") pod \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\" (UID: \"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81\") " Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.666657 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.666690 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.671360 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s" (OuterVolumeSpecName: "kube-api-access-qh47s") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "kube-api-access-qh47s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.671467 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts" (OuterVolumeSpecName: "scripts") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.677995 4953 generic.go:334] "Generic (PLEG): container finished" podID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" exitCode=0 Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678056 4953 generic.go:334] "Generic (PLEG): container finished" podID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" exitCode=2 Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678064 4953 generic.go:334] "Generic (PLEG): container finished" podID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" exitCode=0 Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678071 4953 generic.go:334] "Generic (PLEG): container finished" podID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" exitCode=0 Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678160 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerDied","Data":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678212 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerDied","Data":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678224 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerDied","Data":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678233 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerDied","Data":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678242 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c0fa5b3f-31b1-44fb-b3a1-835b7a971e81","Type":"ContainerDied","Data":"1631f64e9492ffa3bf93804184116ecbd89099c4a5bb4e203115a2181f961541"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678257 4953 scope.go:117] "RemoveContainer" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.678475 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.685092 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c66ca321-9e90-4d55-a75f-a9cb60949914","Type":"ContainerStarted","Data":"ebccfa9ef775962d6bb82800af7bf73f10fa0a32916e6d90c7a8e0f5823677aa"} Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.696670 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.704510 4953 scope.go:117] "RemoveContainer" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.722709 4953 scope.go:117] "RemoveContainer" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.748740 4953 scope.go:117] "RemoveContainer" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.755351 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.769277 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.769330 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.769340 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.769349 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qh47s\" (UniqueName: \"kubernetes.io/projected/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-kube-api-access-qh47s\") on node \"crc\" DevicePath \"\"" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.772359 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data" (OuterVolumeSpecName: "config-data") pod "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" (UID: "c0fa5b3f-31b1-44fb-b3a1-835b7a971e81"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.773838 4953 scope.go:117] "RemoveContainer" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" Oct 11 03:04:38 crc kubenswrapper[4953]: E1011 03:04:38.774205 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": container with ID starting with 965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e not found: ID does not exist" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.774258 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} err="failed to get container status \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": rpc error: code = NotFound desc = could not find container \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": container with ID starting with 965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.774302 4953 scope.go:117] "RemoveContainer" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" Oct 11 03:04:38 crc kubenswrapper[4953]: E1011 03:04:38.774809 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": container with ID starting with 438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6 not found: ID does not exist" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.774837 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"} err="failed to get container status \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": rpc error: code = NotFound desc = could not find container \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": container with ID starting with 438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.774858 4953 scope.go:117] "RemoveContainer" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" Oct 11 03:04:38 crc kubenswrapper[4953]: E1011 03:04:38.775184 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": container with ID starting with a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1 not found: ID does not exist" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.775238 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"} err="failed to get container status \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": rpc error: code = NotFound desc = could not 
find container \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": container with ID starting with a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.775272 4953 scope.go:117] "RemoveContainer" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" Oct 11 03:04:38 crc kubenswrapper[4953]: E1011 03:04:38.775737 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": container with ID starting with 571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d not found: ID does not exist" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.775772 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"} err="failed to get container status \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": rpc error: code = NotFound desc = could not find container \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": container with ID starting with 571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.775790 4953 scope.go:117] "RemoveContainer" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776079 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} err="failed to get container status \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": rpc error: code = NotFound desc = could not find container \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": container with ID starting with 965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776106 4953 scope.go:117] "RemoveContainer" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776387 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"} err="failed to get container status \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": rpc error: code = NotFound desc = could not find container \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": container with ID starting with 438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776408 4953 scope.go:117] "RemoveContainer" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776651 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"} err="failed to get container status \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": rpc error: code = NotFound desc = could not 
find container \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": container with ID starting with a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776676 4953 scope.go:117] "RemoveContainer" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776935 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"} err="failed to get container status \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": rpc error: code = NotFound desc = could not find container \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": container with ID starting with 571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.776957 4953 scope.go:117] "RemoveContainer" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777196 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} err="failed to get container status \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": rpc error: code = NotFound desc = could not find container \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": container with ID starting with 965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777222 4953 scope.go:117] "RemoveContainer" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777502 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"} err="failed to get container status \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": rpc error: code = NotFound desc = could not find container \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": container with ID starting with 438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777520 4953 scope.go:117] "RemoveContainer" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777767 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"} err="failed to get container status \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": rpc error: code = NotFound desc = could not find container \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": container with ID starting with a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1 not found: ID does not exist" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.777794 4953 scope.go:117] "RemoveContainer" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d" Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778058 4953 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"} err="failed to get container status \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": rpc error: code = NotFound desc = could not find container \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": container with ID starting with 571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d not found: ID does not exist"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778085 4953 scope.go:117] "RemoveContainer" containerID="965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778338 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e"} err="failed to get container status \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": rpc error: code = NotFound desc = could not find container \"965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e\": container with ID starting with 965fa57353d7e726c3e4da60bff00c4f66413040ba805cb8758f23f696031b2e not found: ID does not exist"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778363 4953 scope.go:117] "RemoveContainer" containerID="438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778617 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6"} err="failed to get container status \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": rpc error: code = NotFound desc = could not find container \"438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6\": container with ID starting with 438ed3a5b3c0a5c0939a95b663bd94c084fdce02eb7cf775f2addb7e1d717ea6 not found: ID does not exist"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.778640 4953 scope.go:117] "RemoveContainer" containerID="a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.779021 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1"} err="failed to get container status \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": rpc error: code = NotFound desc = could not find container \"a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1\": container with ID starting with a3374006b5310fa0c2190737356a9bbc55784e3819dbe94cb93e5e856d90c5d1 not found: ID does not exist"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.779045 4953 scope.go:117] "RemoveContainer" containerID="571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.779290 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d"} err="failed to get container status \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": rpc error: code = NotFound desc = could not find container \"571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d\": container with ID starting with 571b0e7a70b19e9119cc3b89f59d2b00e9e0aa62d7e116c665340d25193c975d not found: ID does not exist"
Oct 11 03:04:38 crc kubenswrapper[4953]: I1011 03:04:38.871107 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.022929 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.046024 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.060879 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:39 crc kubenswrapper[4953]: E1011 03:04:39.061261 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="proxy-httpd"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061280 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="proxy-httpd"
Oct 11 03:04:39 crc kubenswrapper[4953]: E1011 03:04:39.061320 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="sg-core"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061329 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="sg-core"
Oct 11 03:04:39 crc kubenswrapper[4953]: E1011 03:04:39.061340 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-central-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061348 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-central-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: E1011 03:04:39.061366 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-notification-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061374 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-notification-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061560 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-notification-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061575 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="ceilometer-central-agent"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061591 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="sg-core"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.061628 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" containerName="proxy-httpd"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.066882 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.069575 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.069791 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.069997 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075146 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075238 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075276 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25q8k\" (UniqueName: \"kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075307 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075324 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075349 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.075375 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177198 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177447 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177481 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177514 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177555 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177628 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.177699 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25q8k\" (UniqueName: \"kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.178156 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.178214 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.182416 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.182960 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.185270 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.188014 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.196710 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25q8k\" (UniqueName: \"kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k\") pod \"ceilometer-0\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.388923 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.707199 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c66ca321-9e90-4d55-a75f-a9cb60949914","Type":"ContainerStarted","Data":"f14ba3e3dc1a66d52f489d873fc59a5a0049a0c74da5f50b65ca61e26114391c"}
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.831648 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0fa5b3f-31b1-44fb-b3a1-835b7a971e81" path="/var/lib/kubelet/pods/c0fa5b3f-31b1-44fb-b3a1-835b7a971e81/volumes"
Oct 11 03:04:39 crc kubenswrapper[4953]: I1011 03:04:39.858944 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 11 03:04:39 crc kubenswrapper[4953]: W1011 03:04:39.868206 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda189ef33_f82a_4186_96bb_8a397957e82f.slice/crio-6c060b4823d0ca2f7847aeb30b35f5b92cbe0ef8669c3ee46f8b7efe89c24fb7 WatchSource:0}: Error finding container 6c060b4823d0ca2f7847aeb30b35f5b92cbe0ef8669c3ee46f8b7efe89c24fb7: Status 404 returned error can't find the container with id 6c060b4823d0ca2f7847aeb30b35f5b92cbe0ef8669c3ee46f8b7efe89c24fb7
Oct 11 03:04:40 crc kubenswrapper[4953]: I1011 03:04:40.720419 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerStarted","Data":"444fa5b0c54166879207778b828b4c5556d57b71bfd632fa63c83b62850c851b"}
Oct 11 03:04:40 crc kubenswrapper[4953]: I1011 03:04:40.720939 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerStarted","Data":"6c060b4823d0ca2f7847aeb30b35f5b92cbe0ef8669c3ee46f8b7efe89c24fb7"}
Oct 11 03:04:40 crc kubenswrapper[4953]: I1011 03:04:40.722302 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c66ca321-9e90-4d55-a75f-a9cb60949914","Type":"ContainerStarted","Data":"5464af322d620d5714ff0f80aa00a683f14b87ae57cb8e3a6acb378dec98c876"}
Oct 11 03:04:40 crc kubenswrapper[4953]: I1011 03:04:40.722498 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 11 03:04:41 crc kubenswrapper[4953]: I1011 03:04:41.734458 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerStarted","Data":"04413c4528580daa8beb9e02576812be6058da05b6bda558d4922ae34f60cf2a"}
Oct 11 03:04:42 crc kubenswrapper[4953]: I1011 03:04:42.752592 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerStarted","Data":"21d3840b4ba25dd1cbdc32e59e8d3feeed068de8c7199eabe8585b45f305903a"}
Oct 11 03:04:44 crc kubenswrapper[4953]: I1011 03:04:44.776444 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerStarted","Data":"ee32d46eef55ec9344c747304a30318c79c2e9eea9a18d115fcfaec41af3a46a"}
Oct 11 03:04:44 crc kubenswrapper[4953]: I1011 03:04:44.777427 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 11 03:04:44 crc kubenswrapper[4953]: I1011 03:04:44.808001 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.807977348 podStartE2EDuration="7.807977348s" podCreationTimestamp="2025-10-11 03:04:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:04:40.749192137 +0000 UTC m=+1091.682279831" watchObservedRunningTime="2025-10-11 03:04:44.807977348 +0000 UTC m=+1095.741064992"
Oct 11 03:04:44 crc kubenswrapper[4953]: I1011 03:04:44.810907 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.58084302 podStartE2EDuration="5.810899352s" podCreationTimestamp="2025-10-11 03:04:39 +0000 UTC" firstStartedPulling="2025-10-11 03:04:39.871690907 +0000 UTC m=+1090.804778591" lastFinishedPulling="2025-10-11 03:04:44.101747279 +0000 UTC m=+1095.034834923" observedRunningTime="2025-10-11 03:04:44.804333275 +0000 UTC m=+1095.737420909" watchObservedRunningTime="2025-10-11 03:04:44.810899352 +0000 UTC m=+1095.743986986"
Oct 11 03:04:47 crc kubenswrapper[4953]: I1011 03:04:47.819307 4953 generic.go:334] "Generic (PLEG): container finished" podID="e3af0283-8529-49e1-ad8a-984617204bed" containerID="8ed69d6ce145394755f7621e0905e63cf33d227d43b073cef013912d3ad0747d" exitCode=0
Oct 11 03:04:47 crc kubenswrapper[4953]: I1011 03:04:47.819426 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr9q7" event={"ID":"e3af0283-8529-49e1-ad8a-984617204bed","Type":"ContainerDied","Data":"8ed69d6ce145394755f7621e0905e63cf33d227d43b073cef013912d3ad0747d"}
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.195010 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.307191 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data\") pod \"e3af0283-8529-49e1-ad8a-984617204bed\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") "
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.307442 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts\") pod \"e3af0283-8529-49e1-ad8a-984617204bed\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") "
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.307556 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqkn5\" (UniqueName: \"kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5\") pod \"e3af0283-8529-49e1-ad8a-984617204bed\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") "
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.307838 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle\") pod \"e3af0283-8529-49e1-ad8a-984617204bed\" (UID: \"e3af0283-8529-49e1-ad8a-984617204bed\") "
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.313744 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5" (OuterVolumeSpecName: "kube-api-access-cqkn5") pod "e3af0283-8529-49e1-ad8a-984617204bed" (UID: "e3af0283-8529-49e1-ad8a-984617204bed"). InnerVolumeSpecName "kube-api-access-cqkn5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.313969 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts" (OuterVolumeSpecName: "scripts") pod "e3af0283-8529-49e1-ad8a-984617204bed" (UID: "e3af0283-8529-49e1-ad8a-984617204bed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.336578 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3af0283-8529-49e1-ad8a-984617204bed" (UID: "e3af0283-8529-49e1-ad8a-984617204bed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.338084 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data" (OuterVolumeSpecName: "config-data") pod "e3af0283-8529-49e1-ad8a-984617204bed" (UID: "e3af0283-8529-49e1-ad8a-984617204bed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.409522 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.409834 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.409890 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3af0283-8529-49e1-ad8a-984617204bed-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.409956 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqkn5\" (UniqueName: \"kubernetes.io/projected/e3af0283-8529-49e1-ad8a-984617204bed-kube-api-access-cqkn5\") on node \"crc\" DevicePath \"\""
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.843924 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nr9q7" event={"ID":"e3af0283-8529-49e1-ad8a-984617204bed","Type":"ContainerDied","Data":"89cf2bd3d346f8233a9f0d43c5ebce96b76f1af29408a6c5760d2693441ac793"}
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.843969 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89cf2bd3d346f8233a9f0d43c5ebce96b76f1af29408a6c5760d2693441ac793"
Oct 11 03:04:49 crc kubenswrapper[4953]: I1011 03:04:49.844019 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nr9q7"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.019805 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 11 03:04:50 crc kubenswrapper[4953]: E1011 03:04:50.020493 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3af0283-8529-49e1-ad8a-984617204bed" containerName="nova-cell0-conductor-db-sync"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.020518 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3af0283-8529-49e1-ad8a-984617204bed" containerName="nova-cell0-conductor-db-sync"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.020857 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3af0283-8529-49e1-ad8a-984617204bed" containerName="nova-cell0-conductor-db-sync"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.021581 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.024839 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4vc86"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.025038 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.027265 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.035658 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.122349 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.122401 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.123002 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbxpt\" (UniqueName: \"kubernetes.io/projected/b207f8c3-a1f3-4d56-b506-07371e512f23-kube-api-access-sbxpt\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.224724 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbxpt\" (UniqueName: \"kubernetes.io/projected/b207f8c3-a1f3-4d56-b506-07371e512f23-kube-api-access-sbxpt\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.224776 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.224796 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.229397 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.241145 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbxpt\" (UniqueName: \"kubernetes.io/projected/b207f8c3-a1f3-4d56-b506-07371e512f23-kube-api-access-sbxpt\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.242559 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b207f8c3-a1f3-4d56-b506-07371e512f23-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b207f8c3-a1f3-4d56-b506-07371e512f23\") " pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.346512 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.794849 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 11 03:04:50 crc kubenswrapper[4953]: W1011 03:04:50.813258 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb207f8c3_a1f3_4d56_b506_07371e512f23.slice/crio-80c762d49ec5d4604d913fee4359a0d8d1b33e3e0e7316f549074261552acbc2 WatchSource:0}: Error finding container 80c762d49ec5d4604d913fee4359a0d8d1b33e3e0e7316f549074261552acbc2: Status 404 returned error can't find the container with id 80c762d49ec5d4604d913fee4359a0d8d1b33e3e0e7316f549074261552acbc2
Oct 11 03:04:50 crc kubenswrapper[4953]: I1011 03:04:50.859334 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b207f8c3-a1f3-4d56-b506-07371e512f23","Type":"ContainerStarted","Data":"80c762d49ec5d4604d913fee4359a0d8d1b33e3e0e7316f549074261552acbc2"}
Oct 11 03:04:51 crc kubenswrapper[4953]: I1011 03:04:51.871294 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b207f8c3-a1f3-4d56-b506-07371e512f23","Type":"ContainerStarted","Data":"a9db1bf1e8c83061b145209a04c29b56c08f58f637d8df9e8c9b0787c68b36f5"}
Oct 11 03:04:51 crc kubenswrapper[4953]: I1011 03:04:51.873259 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Oct 11 03:04:51 crc kubenswrapper[4953]: I1011 03:04:51.901763 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.901738821 podStartE2EDuration="2.901738821s" podCreationTimestamp="2025-10-11 03:04:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:04:51.892457995 +0000 UTC m=+1102.825545669" watchObservedRunningTime="2025-10-11 03:04:51.901738821 +0000 UTC m=+1102.834826505"
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.402953 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.895384 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-p9vc4"]
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.896891 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.900369 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.901238 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Oct 11 03:05:00 crc kubenswrapper[4953]: I1011 03:05:00.914018 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p9vc4"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.028768 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.030119 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.033710 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.037073 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.037196 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85j27\" (UniqueName: \"kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.037297 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.037376 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.042256 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.127510 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.129829 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.131569 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138476 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138680 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftgcj\" (UniqueName: \"kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138720 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138748 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138772 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.138808 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85j27\" (UniqueName: \"kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.139181 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.148706 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.148909 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.149060 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.208136 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85j27\" (UniqueName: \"kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27\") pod \"nova-cell0-cell-mapping-p9vc4\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.226821 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p9vc4"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242681 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftgcj\" (UniqueName: \"kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242780 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242818 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242838 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g75m\" (UniqueName: \"kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242938 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.242957 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.261285 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.264199 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.268801 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.269823 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.277000 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.288004 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftgcj\" (UniqueName: \"kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.298578 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.343976 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.344531 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.344684 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.344790 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.344894 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55dzl\" (UniqueName: \"kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.344992 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.345549 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g75m\" (UniqueName: \"kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.345690 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.345196 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.347757 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.349798 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.352978 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.356190 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.365202 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.379552 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g75m\" (UniqueName: \"kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m\") pod \"nova-metadata-0\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.383656 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.422857 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.424232 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.429688 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.448572 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.448671 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.448738 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.448804 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55dzl\" (UniqueName: \"kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.448841 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.449062 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrq42\" (UniqueName: \"kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.449952 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.451597 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.460842 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.460963 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.471527 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55dzl\" (UniqueName: \"kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl\") pod \"nova-scheduler-0\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552019 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552366 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552400 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552433 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552454 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrq42\" (UniqueName: \"kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552482 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552509 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wdnx\" (UniqueName: \"kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552534 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.552574 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.553356 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.557775 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.558899 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.573122 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrq42\" (UniqueName: \"kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42\") pod \"nova-api-0\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.646959 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.654899 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.655004 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.655064 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wdnx\" (UniqueName: \"kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.655128 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.655281 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.655900 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.656811 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.657238 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.658241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.681728 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wdnx\" (UniqueName: \"kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx\") pod \"dnsmasq-dns-566b5b7845-rwqsc\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.704023 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.748309 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.750332 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.817939 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-p9vc4"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.945475 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-m2j4v"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.946729 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.949204 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.949266 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.958009 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-m2j4v"]
Oct 11 03:05:01 crc kubenswrapper[4953]: I1011 03:05:01.998296 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p9vc4" event={"ID":"07c44d49-ff7d-45bc-af7e-55eee19b672b","Type":"ContainerStarted","Data":"be4fe4f7c89d512a1dd8c16176097f90205d3178fe3bcbaff598ae274c69ba66"}
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.004725 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c8b103c6-0197-4872-8965-750ff1a487c7","Type":"ContainerStarted","Data":"b70db375fe35db13a980c7246ac4b3d005d0c945110e217a4fa9429db280c081"}
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.058858 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.083972 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.084033 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.084066 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26l4q\" (UniqueName: \"kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.084108 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.186264 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.186348 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.186403 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26l4q\" (UniqueName: \"kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.186473 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.192566 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.193218 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.195830 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.223075 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26l4q\" (UniqueName: \"kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q\") pod \"nova-cell1-conductor-db-sync-m2j4v\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: W1011 03:05:02.224064 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod900e9a31_b67f_47d8_bd2a_f015a6b03ecc.slice/crio-12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8 WatchSource:0}: Error finding container 12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8: Status 404 returned error can't find the container with id 12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.226875 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.287703 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-m2j4v"
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.350704 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Oct 11 03:05:02 crc kubenswrapper[4953]: W1011 03:05:02.360557 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a832138_485e_4bb9_b83b_40ab00295bfb.slice/crio-bcfd6ab89247138266a9b00a8f285e01bd0c1359552bff9352d99c0c94a026bb WatchSource:0}: Error finding container bcfd6ab89247138266a9b00a8f285e01bd0c1359552bff9352d99c0c94a026bb: Status 404 returned error can't find the container with id bcfd6ab89247138266a9b00a8f285e01bd0c1359552bff9352d99c0c94a026bb
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.455872 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"]
Oct 11 03:05:02 crc kubenswrapper[4953]: I1011 03:05:02.746636 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-m2j4v"]
Oct 11 03:05:02 crc kubenswrapper[4953]: W1011 03:05:02.748514 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4149b77_aada_42e3_b8ad_4392fb1e7c0d.slice/crio-f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa WatchSource:0}: Error finding container f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa: Status 404 returned error can't find the container with id f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa
Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.016562 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerStarted","Data":"333c2cb11512c1dd57425ed41cae842ae8d313e5652a45622408e81fc4e3931d"}
Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.017898 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" event={"ID":"b4149b77-aada-42e3-b8ad-4392fb1e7c0d","Type":"ContainerStarted","Data":"f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa"}
Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.019880 4953 generic.go:334] "Generic
(PLEG): container finished" podID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerID="0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19" exitCode=0 Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.019947 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" event={"ID":"42c8ee30-3526-44f7-9745-3a14b3adbe89","Type":"ContainerDied","Data":"0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19"} Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.019973 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" event={"ID":"42c8ee30-3526-44f7-9745-3a14b3adbe89","Type":"ContainerStarted","Data":"bca495349c4140502a02126d61b92108e206e9c903cd0adfab60d27a626b5053"} Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.022297 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerStarted","Data":"bcfd6ab89247138266a9b00a8f285e01bd0c1359552bff9352d99c0c94a026bb"} Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.023248 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"900e9a31-b67f-47d8-bd2a-f015a6b03ecc","Type":"ContainerStarted","Data":"12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8"} Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.024758 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p9vc4" event={"ID":"07c44d49-ff7d-45bc-af7e-55eee19b672b","Type":"ContainerStarted","Data":"4d61c9be24d437e306c830d10952fc1442143406d7410bcddefa1790e6fac94b"} Oct 11 03:05:03 crc kubenswrapper[4953]: I1011 03:05:03.056906 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-p9vc4" podStartSLOduration=3.056886701 podStartE2EDuration="3.056886701s" podCreationTimestamp="2025-10-11 03:05:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:03.050843097 +0000 UTC m=+1113.983930751" watchObservedRunningTime="2025-10-11 03:05:03.056886701 +0000 UTC m=+1113.989974345" Oct 11 03:05:04 crc kubenswrapper[4953]: I1011 03:05:04.914923 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:04 crc kubenswrapper[4953]: I1011 03:05:04.927904 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.048583 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerStarted","Data":"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.048644 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerStarted","Data":"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.050560 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" event={"ID":"b4149b77-aada-42e3-b8ad-4392fb1e7c0d","Type":"ContainerStarted","Data":"88ee3e6018fa7012cc7681a03d446b012eb60108987c9c7c24f472d52bf8ed0b"} Oct 11 03:05:05 crc kubenswrapper[4953]: 
I1011 03:05:05.052207 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" event={"ID":"42c8ee30-3526-44f7-9745-3a14b3adbe89","Type":"ContainerStarted","Data":"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.053005 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.054746 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c8b103c6-0197-4872-8965-750ff1a487c7","Type":"ContainerStarted","Data":"d880235da6c9dd4ba0f1516b7e5579d2230bc415a5f17c7b398275b343f9d70b"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.058165 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerStarted","Data":"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.058194 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerStarted","Data":"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27"} Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.074649 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.24292297 podStartE2EDuration="4.074628704s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="2025-10-11 03:05:02.075219327 +0000 UTC m=+1113.008306971" lastFinishedPulling="2025-10-11 03:05:03.906925061 +0000 UTC m=+1114.840012705" observedRunningTime="2025-10-11 03:05:05.066951428 +0000 UTC m=+1116.000039072" watchObservedRunningTime="2025-10-11 03:05:05.074628704 +0000 UTC m=+1116.007716348" Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.090548 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" podStartSLOduration=4.090527989 podStartE2EDuration="4.090527989s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:05.085022129 +0000 UTC m=+1116.018109773" watchObservedRunningTime="2025-10-11 03:05:05.090527989 +0000 UTC m=+1116.023615633" Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.129496 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.5829718010000002 podStartE2EDuration="4.129482711s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="2025-10-11 03:05:02.363348176 +0000 UTC m=+1113.296435820" lastFinishedPulling="2025-10-11 03:05:03.909859086 +0000 UTC m=+1114.842946730" observedRunningTime="2025-10-11 03:05:05.125418178 +0000 UTC m=+1116.058505822" watchObservedRunningTime="2025-10-11 03:05:05.129482711 +0000 UTC m=+1116.062570355" Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.150278 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.006661341 podStartE2EDuration="4.15025429s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="2025-10-11 03:05:01.75733662 +0000 UTC m=+1112.690424254" 
lastFinishedPulling="2025-10-11 03:05:03.900929559 +0000 UTC m=+1114.834017203" observedRunningTime="2025-10-11 03:05:05.141662361 +0000 UTC m=+1116.074749995" watchObservedRunningTime="2025-10-11 03:05:05.15025429 +0000 UTC m=+1116.083341934" Oct 11 03:05:05 crc kubenswrapper[4953]: I1011 03:05:05.164908 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" podStartSLOduration=4.164892783 podStartE2EDuration="4.164892783s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:05.157640288 +0000 UTC m=+1116.090727932" watchObservedRunningTime="2025-10-11 03:05:05.164892783 +0000 UTC m=+1116.097980427" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.069274 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"900e9a31-b67f-47d8-bd2a-f015a6b03ecc","Type":"ContainerStarted","Data":"1372b2f441e452f1544cd8ee7439fef8aa34915e9b6e05163146195fbba20993"} Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.069368 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-log" containerID="cri-o://d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" gracePeriod=30 Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.069446 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-metadata" containerID="cri-o://f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" gracePeriod=30 Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.070641 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="c8b103c6-0197-4872-8965-750ff1a487c7" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d880235da6c9dd4ba0f1516b7e5579d2230bc415a5f17c7b398275b343f9d70b" gracePeriod=30 Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.106965 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.8399650950000002 podStartE2EDuration="5.106943358s" podCreationTimestamp="2025-10-11 03:05:01 +0000 UTC" firstStartedPulling="2025-10-11 03:05:02.22572209 +0000 UTC m=+1113.158809734" lastFinishedPulling="2025-10-11 03:05:05.492700333 +0000 UTC m=+1116.425787997" observedRunningTime="2025-10-11 03:05:06.097351164 +0000 UTC m=+1117.030438818" watchObservedRunningTime="2025-10-11 03:05:06.106943358 +0000 UTC m=+1117.040031022" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.356937 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.452394 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.452962 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.647739 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.649013 4953 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.806829 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle\") pod \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.806908 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data\") pod \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.807057 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs\") pod \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.808365 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs" (OuterVolumeSpecName: "logs") pod "9760db9b-1a99-4e36-908b-7e808e5b7bcb" (UID: "9760db9b-1a99-4e36-908b-7e808e5b7bcb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.808449 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2g75m\" (UniqueName: \"kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m\") pod \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\" (UID: \"9760db9b-1a99-4e36-908b-7e808e5b7bcb\") " Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.810212 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9760db9b-1a99-4e36-908b-7e808e5b7bcb-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.828577 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m" (OuterVolumeSpecName: "kube-api-access-2g75m") pod "9760db9b-1a99-4e36-908b-7e808e5b7bcb" (UID: "9760db9b-1a99-4e36-908b-7e808e5b7bcb"). InnerVolumeSpecName "kube-api-access-2g75m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.849090 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9760db9b-1a99-4e36-908b-7e808e5b7bcb" (UID: "9760db9b-1a99-4e36-908b-7e808e5b7bcb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.858852 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data" (OuterVolumeSpecName: "config-data") pod "9760db9b-1a99-4e36-908b-7e808e5b7bcb" (UID: "9760db9b-1a99-4e36-908b-7e808e5b7bcb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.912860 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2g75m\" (UniqueName: \"kubernetes.io/projected/9760db9b-1a99-4e36-908b-7e808e5b7bcb-kube-api-access-2g75m\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.913193 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:06 crc kubenswrapper[4953]: I1011 03:05:06.913332 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9760db9b-1a99-4e36-908b-7e808e5b7bcb-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098706 4953 generic.go:334] "Generic (PLEG): container finished" podID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerID="f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" exitCode=0 Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098747 4953 generic.go:334] "Generic (PLEG): container finished" podID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerID="d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" exitCode=143 Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098789 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098896 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerDied","Data":"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1"} Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098953 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerDied","Data":"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849"} Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.098975 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9760db9b-1a99-4e36-908b-7e808e5b7bcb","Type":"ContainerDied","Data":"333c2cb11512c1dd57425ed41cae842ae8d313e5652a45622408e81fc4e3931d"} Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.099011 4953 scope.go:117] "RemoveContainer" containerID="f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.126799 4953 scope.go:117] "RemoveContainer" containerID="d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.168239 4953 scope.go:117] "RemoveContainer" containerID="f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" Oct 11 03:05:07 crc kubenswrapper[4953]: E1011 03:05:07.168803 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1\": container with ID starting with f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1 not found: ID does not exist" containerID="f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.168859 4953 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1"} err="failed to get container status \"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1\": rpc error: code = NotFound desc = could not find container \"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1\": container with ID starting with f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1 not found: ID does not exist" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.168905 4953 scope.go:117] "RemoveContainer" containerID="d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" Oct 11 03:05:07 crc kubenswrapper[4953]: E1011 03:05:07.169284 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849\": container with ID starting with d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849 not found: ID does not exist" containerID="d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.169340 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849"} err="failed to get container status \"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849\": rpc error: code = NotFound desc = could not find container \"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849\": container with ID starting with d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849 not found: ID does not exist" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.169377 4953 scope.go:117] "RemoveContainer" containerID="f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.170028 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1"} err="failed to get container status \"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1\": rpc error: code = NotFound desc = could not find container \"f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1\": container with ID starting with f590e436d587c7e1d23fa38c424b598c858a75d874a28feb79718ac091d345e1 not found: ID does not exist" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.170070 4953 scope.go:117] "RemoveContainer" containerID="d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.170402 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849"} err="failed to get container status \"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849\": rpc error: code = NotFound desc = could not find container \"d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849\": container with ID starting with d17a3ed5d29cabdc2b301f8eb3d5f072c0329c00728545bb1660772315f57849 not found: ID does not exist" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.170451 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.186046 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-metadata-0"] Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.200590 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:07 crc kubenswrapper[4953]: E1011 03:05:07.201295 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-log" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.201313 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-log" Oct 11 03:05:07 crc kubenswrapper[4953]: E1011 03:05:07.201325 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-metadata" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.201332 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-metadata" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.201563 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-metadata" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.201584 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" containerName="nova-metadata-log" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.203590 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.206805 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.211032 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.215176 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.226962 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccclj\" (UniqueName: \"kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.227012 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.227043 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.227095 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs\") pod \"nova-metadata-0\" (UID: 
\"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.227318 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.336899 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.337384 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccclj\" (UniqueName: \"kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.337449 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.337525 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.337707 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.338215 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.345160 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.346104 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.346257 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.359680 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccclj\" (UniqueName: \"kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj\") pod \"nova-metadata-0\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.534311 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:07 crc kubenswrapper[4953]: I1011 03:05:07.810209 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9760db9b-1a99-4e36-908b-7e808e5b7bcb" path="/var/lib/kubelet/pods/9760db9b-1a99-4e36-908b-7e808e5b7bcb/volumes" Oct 11 03:05:08 crc kubenswrapper[4953]: I1011 03:05:08.039382 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:08 crc kubenswrapper[4953]: I1011 03:05:08.134842 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerStarted","Data":"a40a355fb53b0078e8ca08573e6c52c6468c0135f2909a8fd4447a4d50caeaad"} Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.150121 4953 generic.go:334] "Generic (PLEG): container finished" podID="07c44d49-ff7d-45bc-af7e-55eee19b672b" containerID="4d61c9be24d437e306c830d10952fc1442143406d7410bcddefa1790e6fac94b" exitCode=0 Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.150200 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p9vc4" event={"ID":"07c44d49-ff7d-45bc-af7e-55eee19b672b","Type":"ContainerDied","Data":"4d61c9be24d437e306c830d10952fc1442143406d7410bcddefa1790e6fac94b"} Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.154535 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerStarted","Data":"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc"} Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.154596 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerStarted","Data":"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d"} Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.218975 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.218947853 podStartE2EDuration="2.218947853s" podCreationTimestamp="2025-10-11 03:05:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:09.207074131 +0000 UTC m=+1120.140161845" watchObservedRunningTime="2025-10-11 03:05:09.218947853 +0000 UTC m=+1120.152035537" Oct 11 03:05:09 crc kubenswrapper[4953]: I1011 03:05:09.395710 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.551563 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p9vc4" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.716887 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts\") pod \"07c44d49-ff7d-45bc-af7e-55eee19b672b\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.717083 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85j27\" (UniqueName: \"kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27\") pod \"07c44d49-ff7d-45bc-af7e-55eee19b672b\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.717163 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data\") pod \"07c44d49-ff7d-45bc-af7e-55eee19b672b\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.717205 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle\") pod \"07c44d49-ff7d-45bc-af7e-55eee19b672b\" (UID: \"07c44d49-ff7d-45bc-af7e-55eee19b672b\") " Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.727743 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts" (OuterVolumeSpecName: "scripts") pod "07c44d49-ff7d-45bc-af7e-55eee19b672b" (UID: "07c44d49-ff7d-45bc-af7e-55eee19b672b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.727835 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27" (OuterVolumeSpecName: "kube-api-access-85j27") pod "07c44d49-ff7d-45bc-af7e-55eee19b672b" (UID: "07c44d49-ff7d-45bc-af7e-55eee19b672b"). InnerVolumeSpecName "kube-api-access-85j27". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.753537 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data" (OuterVolumeSpecName: "config-data") pod "07c44d49-ff7d-45bc-af7e-55eee19b672b" (UID: "07c44d49-ff7d-45bc-af7e-55eee19b672b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.760929 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07c44d49-ff7d-45bc-af7e-55eee19b672b" (UID: "07c44d49-ff7d-45bc-af7e-55eee19b672b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.820145 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85j27\" (UniqueName: \"kubernetes.io/projected/07c44d49-ff7d-45bc-af7e-55eee19b672b-kube-api-access-85j27\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.820189 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.820203 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:10 crc kubenswrapper[4953]: I1011 03:05:10.820214 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07c44d49-ff7d-45bc-af7e-55eee19b672b-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.179728 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-p9vc4" event={"ID":"07c44d49-ff7d-45bc-af7e-55eee19b672b","Type":"ContainerDied","Data":"be4fe4f7c89d512a1dd8c16176097f90205d3178fe3bcbaff598ae274c69ba66"} Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.179814 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be4fe4f7c89d512a1dd8c16176097f90205d3178fe3bcbaff598ae274c69ba66" Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.179852 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-p9vc4" Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.316504 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.316974 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.379894 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.380944 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-log" containerID="cri-o://87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" gracePeriod=30 Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.381191 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-api" containerID="cri-o://59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" gracePeriod=30 Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.391317 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-scheduler-0"] Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.391643 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" containerName="nova-scheduler-scheduler" containerID="cri-o://1372b2f441e452f1544cd8ee7439fef8aa34915e9b6e05163146195fbba20993" gracePeriod=30 Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.400174 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.400413 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-log" containerID="cri-o://83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" gracePeriod=30 Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.400493 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-metadata" containerID="cri-o://d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" gracePeriod=30 Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.752591 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.830483 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:05:11 crc kubenswrapper[4953]: I1011 03:05:11.830764 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="dnsmasq-dns" containerID="cri-o://308d5b9022e2f106baf39159ddf4358883b0562355f2b8d698a2e429e6273987" gracePeriod=10 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.017252 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.022945 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.144923 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs\") pod \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145106 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data\") pod \"6a832138-485e-4bb9-b83b-40ab00295bfb\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145190 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs\") pod \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145301 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrq42\" (UniqueName: \"kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42\") pod \"6a832138-485e-4bb9-b83b-40ab00295bfb\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145332 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle\") pod \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145389 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle\") pod \"6a832138-485e-4bb9-b83b-40ab00295bfb\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145395 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs" (OuterVolumeSpecName: "logs") pod "fbfd5974-7130-4a4d-9ccf-332e8dea4001" (UID: "fbfd5974-7130-4a4d-9ccf-332e8dea4001"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145425 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs\") pod \"6a832138-485e-4bb9-b83b-40ab00295bfb\" (UID: \"6a832138-485e-4bb9-b83b-40ab00295bfb\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145584 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccclj\" (UniqueName: \"kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj\") pod \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.145744 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data\") pod \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\" (UID: \"fbfd5974-7130-4a4d-9ccf-332e8dea4001\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.146986 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbfd5974-7130-4a4d-9ccf-332e8dea4001-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.150431 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs" (OuterVolumeSpecName: "logs") pod "6a832138-485e-4bb9-b83b-40ab00295bfb" (UID: "6a832138-485e-4bb9-b83b-40ab00295bfb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.153323 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj" (OuterVolumeSpecName: "kube-api-access-ccclj") pod "fbfd5974-7130-4a4d-9ccf-332e8dea4001" (UID: "fbfd5974-7130-4a4d-9ccf-332e8dea4001"). InnerVolumeSpecName "kube-api-access-ccclj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.154558 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42" (OuterVolumeSpecName: "kube-api-access-qrq42") pod "6a832138-485e-4bb9-b83b-40ab00295bfb" (UID: "6a832138-485e-4bb9-b83b-40ab00295bfb"). InnerVolumeSpecName "kube-api-access-qrq42". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.182894 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a832138-485e-4bb9-b83b-40ab00295bfb" (UID: "6a832138-485e-4bb9-b83b-40ab00295bfb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.190642 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data" (OuterVolumeSpecName: "config-data") pod "6a832138-485e-4bb9-b83b-40ab00295bfb" (UID: "6a832138-485e-4bb9-b83b-40ab00295bfb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.204251 4953 generic.go:334] "Generic (PLEG): container finished" podID="b4149b77-aada-42e3-b8ad-4392fb1e7c0d" containerID="88ee3e6018fa7012cc7681a03d446b012eb60108987c9c7c24f472d52bf8ed0b" exitCode=0 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.204361 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" event={"ID":"b4149b77-aada-42e3-b8ad-4392fb1e7c0d","Type":"ContainerDied","Data":"88ee3e6018fa7012cc7681a03d446b012eb60108987c9c7c24f472d52bf8ed0b"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.206150 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data" (OuterVolumeSpecName: "config-data") pod "fbfd5974-7130-4a4d-9ccf-332e8dea4001" (UID: "fbfd5974-7130-4a4d-9ccf-332e8dea4001"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.211914 4953 generic.go:334] "Generic (PLEG): container finished" podID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerID="d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" exitCode=0 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.211945 4953 generic.go:334] "Generic (PLEG): container finished" podID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerID="83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" exitCode=143 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.212000 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerDied","Data":"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.212024 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.212054 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerDied","Data":"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.212065 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fbfd5974-7130-4a4d-9ccf-332e8dea4001","Type":"ContainerDied","Data":"a40a355fb53b0078e8ca08573e6c52c6468c0135f2909a8fd4447a4d50caeaad"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.212084 4953 scope.go:117] "RemoveContainer" containerID="d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.219254 4953 generic.go:334] "Generic (PLEG): container finished" podID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerID="308d5b9022e2f106baf39159ddf4358883b0562355f2b8d698a2e429e6273987" exitCode=0 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.219350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" event={"ID":"11a120e2-cd03-4c63-8a70-a2bc67f5f511","Type":"ContainerDied","Data":"308d5b9022e2f106baf39159ddf4358883b0562355f2b8d698a2e429e6273987"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223221 4953 generic.go:334] "Generic (PLEG): container finished" podID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerID="59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" exitCode=0 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223247 4953 generic.go:334] "Generic (PLEG): container finished" podID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerID="87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" exitCode=143 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223270 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerDied","Data":"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223293 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerDied","Data":"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223302 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a832138-485e-4bb9-b83b-40ab00295bfb","Type":"ContainerDied","Data":"bcfd6ab89247138266a9b00a8f285e01bd0c1359552bff9352d99c0c94a026bb"} Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.223348 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.231910 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbfd5974-7130-4a4d-9ccf-332e8dea4001" (UID: "fbfd5974-7130-4a4d-9ccf-332e8dea4001"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.239549 4953 scope.go:117] "RemoveContainer" containerID="83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248574 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248625 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248639 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrq42\" (UniqueName: \"kubernetes.io/projected/6a832138-485e-4bb9-b83b-40ab00295bfb-kube-api-access-qrq42\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248647 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248656 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a832138-485e-4bb9-b83b-40ab00295bfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248664 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a832138-485e-4bb9-b83b-40ab00295bfb-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.248672 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccclj\" (UniqueName: \"kubernetes.io/projected/fbfd5974-7130-4a4d-9ccf-332e8dea4001-kube-api-access-ccclj\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.272309 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "fbfd5974-7130-4a4d-9ccf-332e8dea4001" (UID: "fbfd5974-7130-4a4d-9ccf-332e8dea4001"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.273723 4953 scope.go:117] "RemoveContainer" containerID="d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.275864 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc\": container with ID starting with d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc not found: ID does not exist" containerID="d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.275899 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc"} err="failed to get container status \"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc\": rpc error: code = NotFound desc = could not find container \"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc\": container with ID starting with d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.275923 4953 scope.go:117] "RemoveContainer" containerID="83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.275985 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.276505 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d\": container with ID starting with 83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d not found: ID does not exist" containerID="83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.276531 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d"} err="failed to get container status \"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d\": rpc error: code = NotFound desc = could not find container \"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d\": container with ID starting with 83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.276547 4953 scope.go:117] "RemoveContainer" containerID="d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.276897 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc"} err="failed to get container status \"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc\": rpc error: code = NotFound desc = could not find container \"d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc\": container with ID starting with d3e58e49e2f9219c6f6756023685a1f7c858b1d3d071ae9bdcc075bc6ef779dc not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.276916 4953 scope.go:117] "RemoveContainer" 
containerID="83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.277137 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d"} err="failed to get container status \"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d\": rpc error: code = NotFound desc = could not find container \"83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d\": container with ID starting with 83b8f6f0ce6ce8f6b1f5012c0998910a856d241003dce80c8db31e31aa0cf54d not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.277154 4953 scope.go:117] "RemoveContainer" containerID="59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.295697 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.303534 4953 scope.go:117] "RemoveContainer" containerID="87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.304115 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305384 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305707 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-api" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305719 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-api" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305729 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-log" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305736 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-log" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305750 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-metadata" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305757 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-metadata" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305766 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c44d49-ff7d-45bc-af7e-55eee19b672b" containerName="nova-manage" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305772 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c44d49-ff7d-45bc-af7e-55eee19b672b" containerName="nova-manage" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305784 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="init" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305791 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="init" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305811 4953 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="dnsmasq-dns" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305817 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="dnsmasq-dns" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.305831 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-log" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305837 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-log" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.305995 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-log" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.306003 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" containerName="nova-api-api" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.306014 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-metadata" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.306046 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="07c44d49-ff7d-45bc-af7e-55eee19b672b" containerName="nova-manage" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.306056 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" containerName="nova-metadata-log" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.306064 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" containerName="dnsmasq-dns" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.307114 4953 util.go:30] "No sandbox for pod can be found. 
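[Editor's note] The cpu_manager/memory_manager pairs above show stale resource assignments being dropped before the replacement pod is admitted. A minimal Go sketch, assuming nothing about the real cpu_manager internals, of that RemoveStaleState bookkeeping:

// stale_state.go - drop CPUSet assignments whose pods no longer exist.
package main

import "fmt"

type key struct{ podUID, container string }

type stateMem struct {
	assignments map[key]string // value stands in for a CPUSet like "0-3"
}

// removeStaleState deletes every assignment whose pod is not active.
func (s *stateMem) removeStaleState(activePods map[string]bool) {
	for k := range s.assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(s.assignments, k) // safe while ranging in Go
		}
	}
}

func main() {
	s := &stateMem{assignments: map[key]string{
		{"6a832138", "nova-api-api"}: "0-3",
		{"11a120e2", "dnsmasq-dns"}:  "0-3",
		{"live-pod", "app"}:          "0-3",
	}}
	s.removeStaleState(map[string]bool{"live-pod": true})
	fmt.Println("remaining assignments:", len(s.assignments))
}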
Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.311926 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.314734 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.342505 4953 scope.go:117] "RemoveContainer" containerID="59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.343018 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471\": container with ID starting with 59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471 not found: ID does not exist" containerID="59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343052 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471"} err="failed to get container status \"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471\": rpc error: code = NotFound desc = could not find container \"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471\": container with ID starting with 59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471 not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343075 4953 scope.go:117] "RemoveContainer" containerID="87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" Oct 11 03:05:12 crc kubenswrapper[4953]: E1011 03:05:12.343425 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27\": container with ID starting with 87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27 not found: ID does not exist" containerID="87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343449 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27"} err="failed to get container status \"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27\": rpc error: code = NotFound desc = could not find container \"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27\": container with ID starting with 87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27 not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343461 4953 scope.go:117] "RemoveContainer" containerID="59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343684 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471"} err="failed to get container status \"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471\": rpc error: code = NotFound desc = could not find container \"59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471\": container with ID starting with 
59b8997aeedc61b294b55ce5824973d9485576007180a3fde30b652df2e1f471 not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343706 4953 scope.go:117] "RemoveContainer" containerID="87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.343980 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27"} err="failed to get container status \"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27\": rpc error: code = NotFound desc = could not find container \"87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27\": container with ID starting with 87ea1d975b9865cf1cda82eacb17f0c1d19c66f8a07947d477cc0310d5d38e27 not found: ID does not exist" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.350488 4953 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbfd5974-7130-4a4d-9ccf-332e8dea4001-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.451400 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lw9rx\" (UniqueName: \"kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx\") pod \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.451450 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb\") pod \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.451563 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc\") pod \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.451629 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb\") pod \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.451670 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config\") pod \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\" (UID: \"11a120e2-cd03-4c63-8a70-a2bc67f5f511\") " Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.452016 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.452065 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxv89\" (UniqueName: 
\"kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.452117 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.452151 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.455967 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx" (OuterVolumeSpecName: "kube-api-access-lw9rx") pod "11a120e2-cd03-4c63-8a70-a2bc67f5f511" (UID: "11a120e2-cd03-4c63-8a70-a2bc67f5f511"). InnerVolumeSpecName "kube-api-access-lw9rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.505286 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "11a120e2-cd03-4c63-8a70-a2bc67f5f511" (UID: "11a120e2-cd03-4c63-8a70-a2bc67f5f511"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.506818 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "11a120e2-cd03-4c63-8a70-a2bc67f5f511" (UID: "11a120e2-cd03-4c63-8a70-a2bc67f5f511"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.507214 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "11a120e2-cd03-4c63-8a70-a2bc67f5f511" (UID: "11a120e2-cd03-4c63-8a70-a2bc67f5f511"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.513865 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config" (OuterVolumeSpecName: "config") pod "11a120e2-cd03-4c63-8a70-a2bc67f5f511" (UID: "11a120e2-cd03-4c63-8a70-a2bc67f5f511"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553230 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553298 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxv89\" (UniqueName: \"kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553370 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553406 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553528 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553538 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553548 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553558 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lw9rx\" (UniqueName: \"kubernetes.io/projected/11a120e2-cd03-4c63-8a70-a2bc67f5f511-kube-api-access-lw9rx\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553568 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11a120e2-cd03-4c63-8a70-a2bc67f5f511-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.553972 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.558062 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.558332 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.569808 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxv89\" (UniqueName: \"kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89\") pod \"nova-api-0\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.634423 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.667123 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.675124 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.681680 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.683508 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.692821 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.693058 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.697307 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.789159 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.789949 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="8c48e3d3-4846-4492-930a-110c3bf715b1" containerName="kube-state-metrics" containerID="cri-o://2c9e75b715e5b7ad42c5154efff608a719f20ff46c9626e2ee65b8fcc742c7e0" gracePeriod=30 Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.859315 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmlbm\" (UniqueName: \"kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.859379 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.859489 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc 
kubenswrapper[4953]: I1011 03:05:12.859844 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.870002 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.971558 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.971702 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.971751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.971771 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.971849 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmlbm\" (UniqueName: \"kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.975371 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.979212 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.979505 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.980441 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:12 crc kubenswrapper[4953]: I1011 03:05:12.994795 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmlbm\" (UniqueName: \"kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm\") pod \"nova-metadata-0\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " pod="openstack/nova-metadata-0" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.064054 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.148113 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:13 crc kubenswrapper[4953]: W1011 03:05:13.157325 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1199c05f_e229_403e_a482_3e09c7731bb5.slice/crio-fd6f59e16549eb3d72a3ed2e456508fffc16a8f6830aef8e558dd7239749f905 WatchSource:0}: Error finding container fd6f59e16549eb3d72a3ed2e456508fffc16a8f6830aef8e558dd7239749f905: Status 404 returned error can't find the container with id fd6f59e16549eb3d72a3ed2e456508fffc16a8f6830aef8e558dd7239749f905 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.243854 4953 generic.go:334] "Generic (PLEG): container finished" podID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" containerID="1372b2f441e452f1544cd8ee7439fef8aa34915e9b6e05163146195fbba20993" exitCode=0 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.243927 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"900e9a31-b67f-47d8-bd2a-f015a6b03ecc","Type":"ContainerDied","Data":"1372b2f441e452f1544cd8ee7439fef8aa34915e9b6e05163146195fbba20993"} Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.243950 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"900e9a31-b67f-47d8-bd2a-f015a6b03ecc","Type":"ContainerDied","Data":"12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8"} Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.243961 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12a67ffa9cd3ebafb298f8423e7659f6fd771902b46251ef1dbc86daef07a6a8" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.246152 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerStarted","Data":"fd6f59e16549eb3d72a3ed2e456508fffc16a8f6830aef8e558dd7239749f905"} Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.250112 4953 generic.go:334] "Generic (PLEG): container finished" podID="8c48e3d3-4846-4492-930a-110c3bf715b1" containerID="2c9e75b715e5b7ad42c5154efff608a719f20ff46c9626e2ee65b8fcc742c7e0" exitCode=2 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.250217 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"8c48e3d3-4846-4492-930a-110c3bf715b1","Type":"ContainerDied","Data":"2c9e75b715e5b7ad42c5154efff608a719f20ff46c9626e2ee65b8fcc742c7e0"} Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.253028 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.253103 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d97fcdd8f-7x4tv" event={"ID":"11a120e2-cd03-4c63-8a70-a2bc67f5f511","Type":"ContainerDied","Data":"60d531642075e252b1fb8f2e7ba02f8f6431ecacfe6feacc2463dc314cf02f11"} Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.253181 4953 scope.go:117] "RemoveContainer" containerID="308d5b9022e2f106baf39159ddf4358883b0562355f2b8d698a2e429e6273987" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.286056 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.316237 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.316342 4953 scope.go:117] "RemoveContainer" containerID="f024f209ecc3c5055d88b3ce6c7de3b6d8ef5997b76ae61962f64988a02c6ed2" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.348837 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.360517 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d97fcdd8f-7x4tv"] Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.478794 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55dzl\" (UniqueName: \"kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl\") pod \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.479203 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data\") pod \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.479246 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8qkv\" (UniqueName: \"kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv\") pod \"8c48e3d3-4846-4492-930a-110c3bf715b1\" (UID: \"8c48e3d3-4846-4492-930a-110c3bf715b1\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.479339 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle\") pod \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\" (UID: \"900e9a31-b67f-47d8-bd2a-f015a6b03ecc\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.500169 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv" (OuterVolumeSpecName: "kube-api-access-b8qkv") pod "8c48e3d3-4846-4492-930a-110c3bf715b1" (UID: "8c48e3d3-4846-4492-930a-110c3bf715b1"). InnerVolumeSpecName "kube-api-access-b8qkv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.510734 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl" (OuterVolumeSpecName: "kube-api-access-55dzl") pod "900e9a31-b67f-47d8-bd2a-f015a6b03ecc" (UID: "900e9a31-b67f-47d8-bd2a-f015a6b03ecc"). InnerVolumeSpecName "kube-api-access-55dzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.558314 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data" (OuterVolumeSpecName: "config-data") pod "900e9a31-b67f-47d8-bd2a-f015a6b03ecc" (UID: "900e9a31-b67f-47d8-bd2a-f015a6b03ecc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.572890 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "900e9a31-b67f-47d8-bd2a-f015a6b03ecc" (UID: "900e9a31-b67f-47d8-bd2a-f015a6b03ecc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.581514 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8qkv\" (UniqueName: \"kubernetes.io/projected/8c48e3d3-4846-4492-930a-110c3bf715b1-kube-api-access-b8qkv\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.581550 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.581559 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55dzl\" (UniqueName: \"kubernetes.io/projected/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-kube-api-access-55dzl\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.581570 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900e9a31-b67f-47d8-bd2a-f015a6b03ecc-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.631681 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.807490 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11a120e2-cd03-4c63-8a70-a2bc67f5f511" path="/var/lib/kubelet/pods/11a120e2-cd03-4c63-8a70-a2bc67f5f511/volumes" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.808853 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a832138-485e-4bb9-b83b-40ab00295bfb" path="/var/lib/kubelet/pods/6a832138-485e-4bb9-b83b-40ab00295bfb/volumes" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.810032 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbfd5974-7130-4a4d-9ccf-332e8dea4001" path="/var/lib/kubelet/pods/fbfd5974-7130-4a4d-9ccf-332e8dea4001/volumes" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.811624 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:13 
crc kubenswrapper[4953]: I1011 03:05:13.811922 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-central-agent" containerID="cri-o://444fa5b0c54166879207778b828b4c5556d57b71bfd632fa63c83b62850c851b" gracePeriod=30 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.813126 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="sg-core" containerID="cri-o://21d3840b4ba25dd1cbdc32e59e8d3feeed068de8c7199eabe8585b45f305903a" gracePeriod=30 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.813248 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="proxy-httpd" containerID="cri-o://ee32d46eef55ec9344c747304a30318c79c2e9eea9a18d115fcfaec41af3a46a" gracePeriod=30 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.813293 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-notification-agent" containerID="cri-o://04413c4528580daa8beb9e02576812be6058da05b6bda558d4922ae34f60cf2a" gracePeriod=30 Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.836586 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.989840 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data\") pod \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.989894 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26l4q\" (UniqueName: \"kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q\") pod \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.989959 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts\") pod \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.990095 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle\") pod \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\" (UID: \"b4149b77-aada-42e3-b8ad-4392fb1e7c0d\") " Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.994472 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q" (OuterVolumeSpecName: "kube-api-access-26l4q") pod "b4149b77-aada-42e3-b8ad-4392fb1e7c0d" (UID: "b4149b77-aada-42e3-b8ad-4392fb1e7c0d"). InnerVolumeSpecName "kube-api-access-26l4q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:13 crc kubenswrapper[4953]: I1011 03:05:13.994530 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts" (OuterVolumeSpecName: "scripts") pod "b4149b77-aada-42e3-b8ad-4392fb1e7c0d" (UID: "b4149b77-aada-42e3-b8ad-4392fb1e7c0d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.017355 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data" (OuterVolumeSpecName: "config-data") pod "b4149b77-aada-42e3-b8ad-4392fb1e7c0d" (UID: "b4149b77-aada-42e3-b8ad-4392fb1e7c0d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.018735 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4149b77-aada-42e3-b8ad-4392fb1e7c0d" (UID: "b4149b77-aada-42e3-b8ad-4392fb1e7c0d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.092398 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.092444 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.092458 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26l4q\" (UniqueName: \"kubernetes.io/projected/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-kube-api-access-26l4q\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.092471 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4149b77-aada-42e3-b8ad-4392fb1e7c0d-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.268530 4953 generic.go:334] "Generic (PLEG): container finished" podID="a189ef33-f82a-4186-96bb-8a397957e82f" containerID="ee32d46eef55ec9344c747304a30318c79c2e9eea9a18d115fcfaec41af3a46a" exitCode=0 Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.269029 4953 generic.go:334] "Generic (PLEG): container finished" podID="a189ef33-f82a-4186-96bb-8a397957e82f" containerID="21d3840b4ba25dd1cbdc32e59e8d3feeed068de8c7199eabe8585b45f305903a" exitCode=2 Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.269053 4953 generic.go:334] "Generic (PLEG): container finished" podID="a189ef33-f82a-4186-96bb-8a397957e82f" containerID="444fa5b0c54166879207778b828b4c5556d57b71bfd632fa63c83b62850c851b" exitCode=0 Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.269036 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerDied","Data":"ee32d46eef55ec9344c747304a30318c79c2e9eea9a18d115fcfaec41af3a46a"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.269539 4953 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerDied","Data":"21d3840b4ba25dd1cbdc32e59e8d3feeed068de8c7199eabe8585b45f305903a"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.269748 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerDied","Data":"444fa5b0c54166879207778b828b4c5556d57b71bfd632fa63c83b62850c851b"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.271931 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerStarted","Data":"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.271982 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerStarted","Data":"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.275306 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.275383 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8c48e3d3-4846-4492-930a-110c3bf715b1","Type":"ContainerDied","Data":"ea02eb5a6f81e0bcd9732b97ed669796c546b42cbf00969963bcc3de4acd0114"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.275442 4953 scope.go:117] "RemoveContainer" containerID="2c9e75b715e5b7ad42c5154efff608a719f20ff46c9626e2ee65b8fcc742c7e0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.281936 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerStarted","Data":"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.281972 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerStarted","Data":"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.281985 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerStarted","Data":"aff45df6f8166aeed49ef41ec4d7035a5be62b8c289f3cce75d536da14efa827"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.284036 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" event={"ID":"b4149b77-aada-42e3-b8ad-4392fb1e7c0d","Type":"ContainerDied","Data":"f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa"} Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.284080 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f06d1e2a7d9577bd93f3d136dd26acb7a65740fcf74edc3d3fcd1041b29d3daa" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.284060 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-m2j4v" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.284362 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.299708 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.299688833 podStartE2EDuration="2.299688833s" podCreationTimestamp="2025-10-11 03:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:14.295012374 +0000 UTC m=+1125.228100048" watchObservedRunningTime="2025-10-11 03:05:14.299688833 +0000 UTC m=+1125.232776497" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.316473 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: E1011 03:05:14.316889 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4149b77-aada-42e3-b8ad-4392fb1e7c0d" containerName="nova-cell1-conductor-db-sync" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.316906 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4149b77-aada-42e3-b8ad-4392fb1e7c0d" containerName="nova-cell1-conductor-db-sync" Oct 11 03:05:14 crc kubenswrapper[4953]: E1011 03:05:14.316928 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" containerName="nova-scheduler-scheduler" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.316936 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" containerName="nova-scheduler-scheduler" Oct 11 03:05:14 crc kubenswrapper[4953]: E1011 03:05:14.316946 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c48e3d3-4846-4492-930a-110c3bf715b1" containerName="kube-state-metrics" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.316952 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c48e3d3-4846-4492-930a-110c3bf715b1" containerName="kube-state-metrics" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.317140 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" containerName="nova-scheduler-scheduler" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.317224 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c48e3d3-4846-4492-930a-110c3bf715b1" containerName="kube-state-metrics" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.317236 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4149b77-aada-42e3-b8ad-4392fb1e7c0d" containerName="nova-cell1-conductor-db-sync" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.317811 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.320982 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.330765 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.340655 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.349809 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.363193 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.370971 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.379675 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.380883 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.384670 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.391046 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.392529 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.398484 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.398658 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.399412 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.409735 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.409715025 podStartE2EDuration="2.409715025s" podCreationTimestamp="2025-10-11 03:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:14.377012322 +0000 UTC m=+1125.310099966" watchObservedRunningTime="2025-10-11 03:05:14.409715025 +0000 UTC m=+1125.342802669" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.466858 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499697 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499765 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499812 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7pgz\" (UniqueName: \"kubernetes.io/projected/b2036add-c9d0-433b-ad33-c007af59c686-kube-api-access-w7pgz\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499870 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h889n\" (UniqueName: \"kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499915 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499964 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.499991 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.500016 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.500039 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8jxm\" (UniqueName: \"kubernetes.io/projected/79e7d0fa-d4c5-485f-be5e-06d94de4e604-kube-api-access-p8jxm\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.500098 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: 
I1011 03:05:14.601572 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h889n\" (UniqueName: \"kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601686 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601724 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601750 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601773 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601845 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8jxm\" (UniqueName: \"kubernetes.io/projected/79e7d0fa-d4c5-485f-be5e-06d94de4e604-kube-api-access-p8jxm\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601895 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601966 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.601983 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.602014 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7pgz\" 
(UniqueName: \"kubernetes.io/projected/b2036add-c9d0-433b-ad33-c007af59c686-kube-api-access-w7pgz\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.607165 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.607613 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.608311 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.608740 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.610095 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79e7d0fa-d4c5-485f-be5e-06d94de4e604-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.615301 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.617821 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2036add-c9d0-433b-ad33-c007af59c686-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.617835 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h889n\" (UniqueName: \"kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n\") pod \"nova-scheduler-0\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.619907 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8jxm\" (UniqueName: \"kubernetes.io/projected/79e7d0fa-d4c5-485f-be5e-06d94de4e604-kube-api-access-p8jxm\") pod \"nova-cell1-conductor-0\" (UID: \"79e7d0fa-d4c5-485f-be5e-06d94de4e604\") " 
pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.620785 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7pgz\" (UniqueName: \"kubernetes.io/projected/b2036add-c9d0-433b-ad33-c007af59c686-kube-api-access-w7pgz\") pod \"kube-state-metrics-0\" (UID: \"b2036add-c9d0-433b-ad33-c007af59c686\") " pod="openstack/kube-state-metrics-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.641750 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.794528 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:14 crc kubenswrapper[4953]: I1011 03:05:14.855763 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.072907 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 11 03:05:15 crc kubenswrapper[4953]: W1011 03:05:15.074645 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79e7d0fa_d4c5_485f_be5e_06d94de4e604.slice/crio-af26291dae5cb8436cd564ea1d0ecf119e8c3125893913e7b50d04066cae7013 WatchSource:0}: Error finding container af26291dae5cb8436cd564ea1d0ecf119e8c3125893913e7b50d04066cae7013: Status 404 returned error can't find the container with id af26291dae5cb8436cd564ea1d0ecf119e8c3125893913e7b50d04066cae7013 Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.303973 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"79e7d0fa-d4c5-485f-be5e-06d94de4e604","Type":"ContainerStarted","Data":"6924c17804bb8eed82d42ff1377b4043ed6ebe3e192d7667a3922345542f8ebf"} Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.304040 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"79e7d0fa-d4c5-485f-be5e-06d94de4e604","Type":"ContainerStarted","Data":"af26291dae5cb8436cd564ea1d0ecf119e8c3125893913e7b50d04066cae7013"} Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.305463 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.305587 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.336910 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.336879251 podStartE2EDuration="1.336879251s" podCreationTimestamp="2025-10-11 03:05:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:15.322976927 +0000 UTC m=+1126.256064611" watchObservedRunningTime="2025-10-11 03:05:15.336879251 +0000 UTC m=+1126.269966935" Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.376823 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 11 03:05:15 crc kubenswrapper[4953]: W1011 03:05:15.378986 4953 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2036add_c9d0_433b_ad33_c007af59c686.slice/crio-8b4091253fc12a30f3e640d4f9f5fdabf376f37b7fb3e6f5b93b8f13f6bb211a WatchSource:0}: Error finding container 8b4091253fc12a30f3e640d4f9f5fdabf376f37b7fb3e6f5b93b8f13f6bb211a: Status 404 returned error can't find the container with id 8b4091253fc12a30f3e640d4f9f5fdabf376f37b7fb3e6f5b93b8f13f6bb211a Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.817787 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c48e3d3-4846-4492-930a-110c3bf715b1" path="/var/lib/kubelet/pods/8c48e3d3-4846-4492-930a-110c3bf715b1/volumes" Oct 11 03:05:15 crc kubenswrapper[4953]: I1011 03:05:15.819356 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="900e9a31-b67f-47d8-bd2a-f015a6b03ecc" path="/var/lib/kubelet/pods/900e9a31-b67f-47d8-bd2a-f015a6b03ecc/volumes" Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.323429 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9131dd4-ea0c-401c-ac62-20df5d458956","Type":"ContainerStarted","Data":"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335"} Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.323536 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9131dd4-ea0c-401c-ac62-20df5d458956","Type":"ContainerStarted","Data":"e203d7f7423d362eee19ffd8ea2c67b637b1a9ed6caf47e08423fa565c82f3e1"} Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.328356 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b2036add-c9d0-433b-ad33-c007af59c686","Type":"ContainerStarted","Data":"608c77490b7a6523a795afa582cd19f67cdae8049a493f4abd24899f22d7171c"} Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.328401 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b2036add-c9d0-433b-ad33-c007af59c686","Type":"ContainerStarted","Data":"8b4091253fc12a30f3e640d4f9f5fdabf376f37b7fb3e6f5b93b8f13f6bb211a"} Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.328488 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.339960 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.33992882 podStartE2EDuration="2.33992882s" podCreationTimestamp="2025-10-11 03:05:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:16.337631181 +0000 UTC m=+1127.270718825" watchObservedRunningTime="2025-10-11 03:05:16.33992882 +0000 UTC m=+1127.273016474" Oct 11 03:05:16 crc kubenswrapper[4953]: I1011 03:05:16.359474 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.9056253779999999 podStartE2EDuration="2.359446207s" podCreationTimestamp="2025-10-11 03:05:14 +0000 UTC" firstStartedPulling="2025-10-11 03:05:15.386821783 +0000 UTC m=+1126.319909427" lastFinishedPulling="2025-10-11 03:05:15.840642602 +0000 UTC m=+1126.773730256" observedRunningTime="2025-10-11 03:05:16.352344256 +0000 UTC m=+1127.285431890" watchObservedRunningTime="2025-10-11 03:05:16.359446207 +0000 UTC m=+1127.292533851" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.342019 4953 
generic.go:334] "Generic (PLEG): container finished" podID="a189ef33-f82a-4186-96bb-8a397957e82f" containerID="04413c4528580daa8beb9e02576812be6058da05b6bda558d4922ae34f60cf2a" exitCode=0 Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.342221 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerDied","Data":"04413c4528580daa8beb9e02576812be6058da05b6bda558d4922ae34f60cf2a"} Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.549525 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.660975 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25q8k\" (UniqueName: \"kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661041 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661115 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661178 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661267 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661291 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.661351 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle\") pod \"a189ef33-f82a-4186-96bb-8a397957e82f\" (UID: \"a189ef33-f82a-4186-96bb-8a397957e82f\") " Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.662247 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.664896 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.666523 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k" (OuterVolumeSpecName: "kube-api-access-25q8k") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "kube-api-access-25q8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.669698 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts" (OuterVolumeSpecName: "scripts") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.689063 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.750080 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763816 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763862 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a189ef33-f82a-4186-96bb-8a397957e82f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763877 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763891 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25q8k\" (UniqueName: \"kubernetes.io/projected/a189ef33-f82a-4186-96bb-8a397957e82f-kube-api-access-25q8k\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763903 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.763915 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.766437 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data" (OuterVolumeSpecName: "config-data") pod "a189ef33-f82a-4186-96bb-8a397957e82f" (UID: "a189ef33-f82a-4186-96bb-8a397957e82f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:17 crc kubenswrapper[4953]: I1011 03:05:17.865479 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a189ef33-f82a-4186-96bb-8a397957e82f-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.065236 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.065295 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.363216 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a189ef33-f82a-4186-96bb-8a397957e82f","Type":"ContainerDied","Data":"6c060b4823d0ca2f7847aeb30b35f5b92cbe0ef8669c3ee46f8b7efe89c24fb7"} Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.363310 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.363512 4953 scope.go:117] "RemoveContainer" containerID="ee32d46eef55ec9344c747304a30318c79c2e9eea9a18d115fcfaec41af3a46a" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.385151 4953 scope.go:117] "RemoveContainer" containerID="21d3840b4ba25dd1cbdc32e59e8d3feeed068de8c7199eabe8585b45f305903a" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.397319 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.407528 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.412152 4953 scope.go:117] "RemoveContainer" containerID="04413c4528580daa8beb9e02576812be6058da05b6bda558d4922ae34f60cf2a" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.422918 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:18 crc kubenswrapper[4953]: E1011 03:05:18.423404 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="sg-core" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423428 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="sg-core" Oct 11 03:05:18 crc kubenswrapper[4953]: E1011 03:05:18.423447 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-central-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423455 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-central-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: E1011 03:05:18.423469 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-notification-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423477 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-notification-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: E1011 03:05:18.423498 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="proxy-httpd" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423505 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="proxy-httpd" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423744 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-notification-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423766 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="ceilometer-central-agent" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423788 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="proxy-httpd" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.423801 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" containerName="sg-core" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.426009 4953 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.431401 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.431558 4953 scope.go:117] "RemoveContainer" containerID="444fa5b0c54166879207778b828b4c5556d57b71bfd632fa63c83b62850c851b" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.431825 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.431945 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.446007 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589255 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589304 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j8gl\" (UniqueName: \"kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589330 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589528 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589643 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589681 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589750 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 
03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.589806 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695680 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695734 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j8gl\" (UniqueName: \"kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695775 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695811 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695833 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695855 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695883 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.695914 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.696356 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 
crc kubenswrapper[4953]: I1011 03:05:18.699853 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.700033 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.700107 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.709216 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.752163 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j8gl\" (UniqueName: \"kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.771555 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:18 crc kubenswrapper[4953]: I1011 03:05:18.774476 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data\") pod \"ceilometer-0\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " pod="openstack/ceilometer-0" Oct 11 03:05:19 crc kubenswrapper[4953]: I1011 03:05:19.054031 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:19 crc kubenswrapper[4953]: I1011 03:05:19.498534 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:19 crc kubenswrapper[4953]: W1011 03:05:19.502698 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b2919b9_f488_42f3_a12a_66c403b3a4f5.slice/crio-756615f2be83b5a7503edc8df25d3fe87b80fa0c809b30820ba4a58eaa9e0079 WatchSource:0}: Error finding container 756615f2be83b5a7503edc8df25d3fe87b80fa0c809b30820ba4a58eaa9e0079: Status 404 returned error can't find the container with id 756615f2be83b5a7503edc8df25d3fe87b80fa0c809b30820ba4a58eaa9e0079 Oct 11 03:05:19 crc kubenswrapper[4953]: I1011 03:05:19.794651 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 11 03:05:19 crc kubenswrapper[4953]: I1011 03:05:19.806115 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a189ef33-f82a-4186-96bb-8a397957e82f" path="/var/lib/kubelet/pods/a189ef33-f82a-4186-96bb-8a397957e82f/volumes" Oct 11 03:05:20 crc kubenswrapper[4953]: I1011 03:05:20.383303 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerStarted","Data":"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24"} Oct 11 03:05:20 crc kubenswrapper[4953]: I1011 03:05:20.385344 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerStarted","Data":"756615f2be83b5a7503edc8df25d3fe87b80fa0c809b30820ba4a58eaa9e0079"} Oct 11 03:05:21 crc kubenswrapper[4953]: I1011 03:05:21.399336 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerStarted","Data":"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1"} Oct 11 03:05:22 crc kubenswrapper[4953]: I1011 03:05:22.410636 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerStarted","Data":"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb"} Oct 11 03:05:22 crc kubenswrapper[4953]: I1011 03:05:22.635412 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:05:22 crc kubenswrapper[4953]: I1011 03:05:22.635509 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:05:23 crc kubenswrapper[4953]: I1011 03:05:23.065862 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 03:05:23 crc kubenswrapper[4953]: I1011 03:05:23.065903 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 03:05:23 crc kubenswrapper[4953]: I1011 03:05:23.717847 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.179:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 03:05:23 crc kubenswrapper[4953]: I1011 03:05:23.718076 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.179:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.148803 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.180:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.148795 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.180:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.436730 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerStarted","Data":"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1"} Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.436889 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.466146 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.413741544 podStartE2EDuration="6.466123401s" podCreationTimestamp="2025-10-11 03:05:18 +0000 UTC" firstStartedPulling="2025-10-11 03:05:19.505442188 +0000 UTC m=+1130.438529832" lastFinishedPulling="2025-10-11 03:05:23.557824045 +0000 UTC m=+1134.490911689" observedRunningTime="2025-10-11 03:05:24.457881601 +0000 UTC m=+1135.390969245" watchObservedRunningTime="2025-10-11 03:05:24.466123401 +0000 UTC m=+1135.399211045" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.678484 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.796978 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.828229 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 11 03:05:24 crc kubenswrapper[4953]: I1011 03:05:24.868321 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 11 03:05:25 crc kubenswrapper[4953]: I1011 03:05:25.482145 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 11 03:05:32 crc kubenswrapper[4953]: I1011 03:05:32.685187 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 03:05:32 crc kubenswrapper[4953]: I1011 03:05:32.686051 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 03:05:32 crc kubenswrapper[4953]: I1011 03:05:32.686684 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 03:05:32 crc kubenswrapper[4953]: I1011 03:05:32.702284 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 
11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.073045 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.077475 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.084461 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.533048 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.541804 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.549755 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.748082 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.749456 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.771415 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.798708 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.798793 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.798829 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.798859 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.798911 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll4bt\" (UniqueName: \"kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc 
kubenswrapper[4953]: I1011 03:05:33.900382 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll4bt\" (UniqueName: \"kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.900495 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.900535 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.900556 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.900576 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.901459 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.901546 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.902066 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.902223 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:33 crc kubenswrapper[4953]: I1011 03:05:33.923228 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ll4bt\" (UniqueName: \"kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt\") pod \"dnsmasq-dns-5b856c5697-ld7pk\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:34 crc kubenswrapper[4953]: I1011 03:05:34.070095 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:34 crc kubenswrapper[4953]: I1011 03:05:34.574700 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:05:35 crc kubenswrapper[4953]: I1011 03:05:35.548316 4953 generic.go:334] "Generic (PLEG): container finished" podID="bee71ad6-4395-4369-9501-7303bdae777e" containerID="7a00521fc580f301d455574c19ef307653f0cd149265446d6e17fc6c3b07ef5c" exitCode=0 Oct 11 03:05:35 crc kubenswrapper[4953]: I1011 03:05:35.548445 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" event={"ID":"bee71ad6-4395-4369-9501-7303bdae777e","Type":"ContainerDied","Data":"7a00521fc580f301d455574c19ef307653f0cd149265446d6e17fc6c3b07ef5c"} Oct 11 03:05:35 crc kubenswrapper[4953]: I1011 03:05:35.549120 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" event={"ID":"bee71ad6-4395-4369-9501-7303bdae777e","Type":"ContainerStarted","Data":"a6ae7c089aa9c61ab1c331cc5a066914e0e2d5a1bf236ea33fa611df24955bb3"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.042392 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.042684 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-central-agent" containerID="cri-o://d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.042986 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="proxy-httpd" containerID="cri-o://65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.043169 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="sg-core" containerID="cri-o://ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.043520 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-notification-agent" containerID="cri-o://f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.056474 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.184:3000/\": EOF" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.316594 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.560853 4953 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" event={"ID":"bee71ad6-4395-4369-9501-7303bdae777e","Type":"ContainerStarted","Data":"8112d8d716cd9e77403b5ff79d3b5ba8594714f7b5deb0ba01ab97620a94721a"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.561005 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.563140 4953 generic.go:334] "Generic (PLEG): container finished" podID="c8b103c6-0197-4872-8965-750ff1a487c7" containerID="d880235da6c9dd4ba0f1516b7e5579d2230bc415a5f17c7b398275b343f9d70b" exitCode=137 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.563187 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c8b103c6-0197-4872-8965-750ff1a487c7","Type":"ContainerDied","Data":"d880235da6c9dd4ba0f1516b7e5579d2230bc415a5f17c7b398275b343f9d70b"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.563205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c8b103c6-0197-4872-8965-750ff1a487c7","Type":"ContainerDied","Data":"b70db375fe35db13a980c7246ac4b3d005d0c945110e217a4fa9429db280c081"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.563215 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b70db375fe35db13a980c7246ac4b3d005d0c945110e217a4fa9429db280c081" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565390 4953 generic.go:334] "Generic (PLEG): container finished" podID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerID="65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" exitCode=0 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565408 4953 generic.go:334] "Generic (PLEG): container finished" podID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerID="ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" exitCode=2 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565593 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-log" containerID="cri-o://83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565673 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerDied","Data":"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565692 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerDied","Data":"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb"} Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.565738 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-api" containerID="cri-o://d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963" gracePeriod=30 Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.587387 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" podStartSLOduration=3.587370697 podStartE2EDuration="3.587370697s" 
podCreationTimestamp="2025-10-11 03:05:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:36.581458186 +0000 UTC m=+1147.514545850" watchObservedRunningTime="2025-10-11 03:05:36.587370697 +0000 UTC m=+1147.520458341" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.609697 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.680346 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftgcj\" (UniqueName: \"kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj\") pod \"c8b103c6-0197-4872-8965-750ff1a487c7\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.680405 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle\") pod \"c8b103c6-0197-4872-8965-750ff1a487c7\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.680495 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data\") pod \"c8b103c6-0197-4872-8965-750ff1a487c7\" (UID: \"c8b103c6-0197-4872-8965-750ff1a487c7\") " Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.685656 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj" (OuterVolumeSpecName: "kube-api-access-ftgcj") pod "c8b103c6-0197-4872-8965-750ff1a487c7" (UID: "c8b103c6-0197-4872-8965-750ff1a487c7"). InnerVolumeSpecName "kube-api-access-ftgcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.712522 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data" (OuterVolumeSpecName: "config-data") pod "c8b103c6-0197-4872-8965-750ff1a487c7" (UID: "c8b103c6-0197-4872-8965-750ff1a487c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.718398 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8b103c6-0197-4872-8965-750ff1a487c7" (UID: "c8b103c6-0197-4872-8965-750ff1a487c7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.783555 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftgcj\" (UniqueName: \"kubernetes.io/projected/c8b103c6-0197-4872-8965-750ff1a487c7-kube-api-access-ftgcj\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.783631 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:36 crc kubenswrapper[4953]: I1011 03:05:36.783642 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8b103c6-0197-4872-8965-750ff1a487c7-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.445706 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502526 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502593 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j8gl\" (UniqueName: \"kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502674 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502750 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502785 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502809 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.502881 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: 
I1011 03:05:37.502908 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd\") pod \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\" (UID: \"6b2919b9-f488-42f3-a12a-66c403b3a4f5\") " Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.503173 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.503311 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.503474 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.507555 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts" (OuterVolumeSpecName: "scripts") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.508760 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl" (OuterVolumeSpecName: "kube-api-access-6j8gl") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "kube-api-access-6j8gl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.534706 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.560535 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.585776 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599375 4953 generic.go:334] "Generic (PLEG): container finished" podID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerID="f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" exitCode=0 Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599409 4953 generic.go:334] "Generic (PLEG): container finished" podID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerID="d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" exitCode=0 Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599457 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599483 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerDied","Data":"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1"} Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599512 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerDied","Data":"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24"} Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599522 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b2919b9-f488-42f3-a12a-66c403b3a4f5","Type":"ContainerDied","Data":"756615f2be83b5a7503edc8df25d3fe87b80fa0c809b30820ba4a58eaa9e0079"} Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.599536 4953 scope.go:117] "RemoveContainer" containerID="65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.606450 4953 generic.go:334] "Generic (PLEG): container finished" podID="1199c05f-e229-403e-a482-3e09c7731bb5" containerID="83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e" exitCode=143 Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.606995 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerDied","Data":"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e"} Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607095 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607326 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b2919b9-f488-42f3-a12a-66c403b3a4f5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607514 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607789 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j8gl\" (UniqueName: \"kubernetes.io/projected/6b2919b9-f488-42f3-a12a-66c403b3a4f5-kube-api-access-6j8gl\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607809 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607822 4953 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.607834 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.631570 4953 scope.go:117] "RemoveContainer" containerID="ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.651048 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data" (OuterVolumeSpecName: "config-data") pod "6b2919b9-f488-42f3-a12a-66c403b3a4f5" (UID: "6b2919b9-f488-42f3-a12a-66c403b3a4f5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.653267 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.666188 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.678297 4953 scope.go:117] "RemoveContainer" containerID="f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679276 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.679671 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="proxy-httpd" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679685 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="proxy-httpd" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.679699 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="sg-core" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679706 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="sg-core" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.679727 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-notification-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679733 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-notification-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.679748 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8b103c6-0197-4872-8965-750ff1a487c7" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679754 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8b103c6-0197-4872-8965-750ff1a487c7" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.679764 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-central-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679771 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-central-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679930 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-central-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679942 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="proxy-httpd" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679957 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8b103c6-0197-4872-8965-750ff1a487c7" containerName="nova-cell1-novncproxy-novncproxy" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679965 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" 
containerName="sg-core" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.679978 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" containerName="ceilometer-notification-agent" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.681403 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.688334 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.688815 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.689157 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.692688 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.709486 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2919b9-f488-42f3-a12a-66c403b3a4f5-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.714061 4953 scope.go:117] "RemoveContainer" containerID="d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.736562 4953 scope.go:117] "RemoveContainer" containerID="65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.737421 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1\": container with ID starting with 65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1 not found: ID does not exist" containerID="65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.737484 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1"} err="failed to get container status \"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1\": rpc error: code = NotFound desc = could not find container \"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1\": container with ID starting with 65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.737523 4953 scope.go:117] "RemoveContainer" containerID="ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.739251 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb\": container with ID starting with ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb not found: ID does not exist" containerID="ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.739292 4953 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb"} err="failed to get container status \"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb\": rpc error: code = NotFound desc = could not find container \"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb\": container with ID starting with ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.739321 4953 scope.go:117] "RemoveContainer" containerID="f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.739750 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1\": container with ID starting with f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1 not found: ID does not exist" containerID="f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.739777 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1"} err="failed to get container status \"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1\": rpc error: code = NotFound desc = could not find container \"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1\": container with ID starting with f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.739793 4953 scope.go:117] "RemoveContainer" containerID="d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" Oct 11 03:05:37 crc kubenswrapper[4953]: E1011 03:05:37.740144 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24\": container with ID starting with d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24 not found: ID does not exist" containerID="d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740169 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24"} err="failed to get container status \"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24\": rpc error: code = NotFound desc = could not find container \"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24\": container with ID starting with d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740184 4953 scope.go:117] "RemoveContainer" containerID="65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740497 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1"} err="failed to get container status \"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1\": rpc error: code = NotFound desc = could not find container 
\"65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1\": container with ID starting with 65d6c2fb2743d400f5a56ae30831be8dc56b6c36a4ee9c9dd9184176b54ef6f1 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740517 4953 scope.go:117] "RemoveContainer" containerID="ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740928 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb"} err="failed to get container status \"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb\": rpc error: code = NotFound desc = could not find container \"ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb\": container with ID starting with ac201d422c7b95a9a88d21c88e3c4e34624c7fe48f42193e61561484638d6cdb not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.740952 4953 scope.go:117] "RemoveContainer" containerID="f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.741407 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1"} err="failed to get container status \"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1\": rpc error: code = NotFound desc = could not find container \"f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1\": container with ID starting with f84addc18498d16595e46583e96894b70863fdf06f7847b027c5e916044c14a1 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.741431 4953 scope.go:117] "RemoveContainer" containerID="d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.741877 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24"} err="failed to get container status \"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24\": rpc error: code = NotFound desc = could not find container \"d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24\": container with ID starting with d0cf5386e5f9d3054759892d31fbfa99b81c4267096fd6ac2e22679a795aff24 not found: ID does not exist" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.805805 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8b103c6-0197-4872-8965-750ff1a487c7" path="/var/lib/kubelet/pods/c8b103c6-0197-4872-8965-750ff1a487c7/volumes" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.810587 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv4bq\" (UniqueName: \"kubernetes.io/projected/6eb51273-e586-481d-b374-82863923b150-kube-api-access-xv4bq\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.810661 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.810681 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.810717 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.810784 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.912452 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.912528 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv4bq\" (UniqueName: \"kubernetes.io/projected/6eb51273-e586-481d-b374-82863923b150-kube-api-access-xv4bq\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.912589 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.912622 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.912660 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.916247 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 
03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.918047 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.919728 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.920648 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6eb51273-e586-481d-b374-82863923b150-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.929792 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.939162 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.940360 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv4bq\" (UniqueName: \"kubernetes.io/projected/6eb51273-e586-481d-b374-82863923b150-kube-api-access-xv4bq\") pod \"nova-cell1-novncproxy-0\" (UID: \"6eb51273-e586-481d-b374-82863923b150\") " pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.946081 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.948089 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.950419 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.950460 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.950527 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:05:37 crc kubenswrapper[4953]: I1011 03:05:37.963007 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.010424 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014252 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014324 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014367 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rwhv\" (UniqueName: \"kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014397 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014421 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014517 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014551 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.014577 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118529 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118629 4953 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118671 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rwhv\" (UniqueName: \"kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118705 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118729 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118815 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118843 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.118872 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.119519 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.119767 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.126876 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.127175 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.128141 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.138511 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.138649 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.142283 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rwhv\" (UniqueName: \"kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv\") pod \"ceilometer-0\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.267089 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.275488 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.489019 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 11 03:05:38 crc kubenswrapper[4953]: W1011 03:05:38.494684 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6eb51273_e586_481d_b374_82863923b150.slice/crio-8f4ab4924d0c03777ae61c236cc4cd37297007205774b36205344421ba0c0927 WatchSource:0}: Error finding container 8f4ab4924d0c03777ae61c236cc4cd37297007205774b36205344421ba0c0927: Status 404 returned error can't find the container with id 8f4ab4924d0c03777ae61c236cc4cd37297007205774b36205344421ba0c0927 Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.588080 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:38 crc kubenswrapper[4953]: W1011 03:05:38.598487 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57d85613_6f80_4ecb_842b_5b7e3af58c7a.slice/crio-6d5d1eb249e0f025382ff3ede9e2f2f384d83bfc5d75f628bd7eea9c79a0b1ed WatchSource:0}: Error finding container 6d5d1eb249e0f025382ff3ede9e2f2f384d83bfc5d75f628bd7eea9c79a0b1ed: Status 404 returned error can't find the container with id 6d5d1eb249e0f025382ff3ede9e2f2f384d83bfc5d75f628bd7eea9c79a0b1ed Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.644996 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerStarted","Data":"6d5d1eb249e0f025382ff3ede9e2f2f384d83bfc5d75f628bd7eea9c79a0b1ed"} Oct 11 03:05:38 crc kubenswrapper[4953]: I1011 03:05:38.648557 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6eb51273-e586-481d-b374-82863923b150","Type":"ContainerStarted","Data":"8f4ab4924d0c03777ae61c236cc4cd37297007205774b36205344421ba0c0927"} Oct 11 03:05:39 crc kubenswrapper[4953]: I1011 03:05:39.661351 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerStarted","Data":"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc"} Oct 11 03:05:39 crc kubenswrapper[4953]: I1011 03:05:39.664112 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6eb51273-e586-481d-b374-82863923b150","Type":"ContainerStarted","Data":"cbca3e6b8119a48caa16eced4cfa7b973040094a4e44adcddc96959ea16b3b4f"} Oct 11 03:05:39 crc kubenswrapper[4953]: I1011 03:05:39.683585 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.683571549 podStartE2EDuration="2.683571549s" podCreationTimestamp="2025-10-11 03:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:39.681977519 +0000 UTC m=+1150.615065163" watchObservedRunningTime="2025-10-11 03:05:39.683571549 +0000 UTC m=+1150.616659193" Oct 11 03:05:39 crc kubenswrapper[4953]: I1011 03:05:39.807885 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b2919b9-f488-42f3-a12a-66c403b3a4f5" path="/var/lib/kubelet/pods/6b2919b9-f488-42f3-a12a-66c403b3a4f5/volumes" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.107003 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.158292 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs\") pod \"1199c05f-e229-403e-a482-3e09c7731bb5\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.158424 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data\") pod \"1199c05f-e229-403e-a482-3e09c7731bb5\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.158470 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxv89\" (UniqueName: \"kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89\") pod \"1199c05f-e229-403e-a482-3e09c7731bb5\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.158495 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle\") pod \"1199c05f-e229-403e-a482-3e09c7731bb5\" (UID: \"1199c05f-e229-403e-a482-3e09c7731bb5\") " Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.167042 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs" (OuterVolumeSpecName: "logs") pod "1199c05f-e229-403e-a482-3e09c7731bb5" (UID: "1199c05f-e229-403e-a482-3e09c7731bb5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.170546 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89" (OuterVolumeSpecName: "kube-api-access-wxv89") pod "1199c05f-e229-403e-a482-3e09c7731bb5" (UID: "1199c05f-e229-403e-a482-3e09c7731bb5"). InnerVolumeSpecName "kube-api-access-wxv89". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.203551 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1199c05f-e229-403e-a482-3e09c7731bb5" (UID: "1199c05f-e229-403e-a482-3e09c7731bb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.222769 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data" (OuterVolumeSpecName: "config-data") pod "1199c05f-e229-403e-a482-3e09c7731bb5" (UID: "1199c05f-e229-403e-a482-3e09c7731bb5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.260630 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.260674 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxv89\" (UniqueName: \"kubernetes.io/projected/1199c05f-e229-403e-a482-3e09c7731bb5-kube-api-access-wxv89\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.260684 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1199c05f-e229-403e-a482-3e09c7731bb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.260694 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1199c05f-e229-403e-a482-3e09c7731bb5-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.684908 4953 generic.go:334] "Generic (PLEG): container finished" podID="1199c05f-e229-403e-a482-3e09c7731bb5" containerID="d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963" exitCode=0 Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.684991 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.685013 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerDied","Data":"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963"} Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.685413 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1199c05f-e229-403e-a482-3e09c7731bb5","Type":"ContainerDied","Data":"fd6f59e16549eb3d72a3ed2e456508fffc16a8f6830aef8e558dd7239749f905"} Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.685446 4953 scope.go:117] "RemoveContainer" containerID="d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.692350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerStarted","Data":"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e"} Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.715757 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.723316 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.728820 4953 scope.go:117] "RemoveContainer" containerID="83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.737321 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:40 crc kubenswrapper[4953]: E1011 03:05:40.737672 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-log" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.737683 4953 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-log" Oct 11 03:05:40 crc kubenswrapper[4953]: E1011 03:05:40.737700 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-api" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.737715 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-api" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.737876 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-api" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.737889 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" containerName="nova-api-log" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.738717 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.740663 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.740770 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.741738 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.792146 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.841235 4953 scope.go:117] "RemoveContainer" containerID="d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963" Oct 11 03:05:40 crc kubenswrapper[4953]: E1011 03:05:40.841755 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963\": container with ID starting with d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963 not found: ID does not exist" containerID="d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.841792 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963"} err="failed to get container status \"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963\": rpc error: code = NotFound desc = could not find container \"d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963\": container with ID starting with d0a092a657c745a1f63951aa4fd8a11c88e17c211ba45b089b4fbeed67594963 not found: ID does not exist" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.841818 4953 scope.go:117] "RemoveContainer" containerID="83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e" Oct 11 03:05:40 crc kubenswrapper[4953]: E1011 03:05:40.842315 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e\": container with ID starting with 83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e not found: ID does not exist" 
containerID="83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.842356 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e"} err="failed to get container status \"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e\": rpc error: code = NotFound desc = could not find container \"83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e\": container with ID starting with 83417b8b36015505968f57d9e9a586b38f172afab43be41c6a05b586496d717e not found: ID does not exist" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.869820 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v269r\" (UniqueName: \"kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.869962 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.870208 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.870329 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.870374 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.870407 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.971452 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.971533 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data\") pod \"nova-api-0\" (UID: 
\"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.972234 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v269r\" (UniqueName: \"kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.972284 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.972329 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.972392 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.972975 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.976043 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.976301 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.976366 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.979043 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 11 03:05:40 crc kubenswrapper[4953]: I1011 03:05:40.997333 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v269r\" (UniqueName: \"kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r\") pod \"nova-api-0\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " pod="openstack/nova-api-0" Oct 
11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.133712 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.316712 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.316772 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.590306 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:41 crc kubenswrapper[4953]: W1011 03:05:41.596433 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a6e4fef_1ea6_49ad_bc9b_5a0a7f452a80.slice/crio-47de1115e022d89894156f5094534476ba628c3188a064cbbd5c3e71a2e3bf49 WatchSource:0}: Error finding container 47de1115e022d89894156f5094534476ba628c3188a064cbbd5c3e71a2e3bf49: Status 404 returned error can't find the container with id 47de1115e022d89894156f5094534476ba628c3188a064cbbd5c3e71a2e3bf49 Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.706150 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerStarted","Data":"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba"} Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.707449 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerStarted","Data":"47de1115e022d89894156f5094534476ba628c3188a064cbbd5c3e71a2e3bf49"} Oct 11 03:05:41 crc kubenswrapper[4953]: I1011 03:05:41.806990 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1199c05f-e229-403e-a482-3e09c7731bb5" path="/var/lib/kubelet/pods/1199c05f-e229-403e-a482-3e09c7731bb5/volumes" Oct 11 03:05:42 crc kubenswrapper[4953]: I1011 03:05:42.717868 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerStarted","Data":"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb"} Oct 11 03:05:42 crc kubenswrapper[4953]: I1011 03:05:42.718448 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerStarted","Data":"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb"} Oct 11 03:05:42 crc kubenswrapper[4953]: I1011 03:05:42.746288 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.746270139 podStartE2EDuration="2.746270139s" podCreationTimestamp="2025-10-11 03:05:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:42.73647559 +0000 UTC m=+1153.669563234" watchObservedRunningTime="2025-10-11 
03:05:42.746270139 +0000 UTC m=+1153.679357783" Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.011421 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.730436 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerStarted","Data":"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb"} Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.730631 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="proxy-httpd" containerID="cri-o://cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb" gracePeriod=30 Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.730587 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-central-agent" containerID="cri-o://cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc" gracePeriod=30 Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.730678 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="sg-core" containerID="cri-o://8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba" gracePeriod=30 Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.730683 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-notification-agent" containerID="cri-o://8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e" gracePeriod=30 Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.731323 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:05:43 crc kubenswrapper[4953]: I1011 03:05:43.763118 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.751190101 podStartE2EDuration="6.763077217s" podCreationTimestamp="2025-10-11 03:05:37 +0000 UTC" firstStartedPulling="2025-10-11 03:05:38.601082528 +0000 UTC m=+1149.534170172" lastFinishedPulling="2025-10-11 03:05:42.612969644 +0000 UTC m=+1153.546057288" observedRunningTime="2025-10-11 03:05:43.759458045 +0000 UTC m=+1154.692545689" watchObservedRunningTime="2025-10-11 03:05:43.763077217 +0000 UTC m=+1154.696164861" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.071764 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.130282 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"] Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.130492 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="dnsmasq-dns" containerID="cri-o://075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b" gracePeriod=10 Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.534500 4953 util.go:48] "No ready sandbox for pod can be found. 
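
The two pod_startup_latency_tracker records above are internally consistent and show how the tracker separates image-pull time from the SLO figure: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the pull window (lastFinishedPulling minus firstStartedPulling). Reproducing the ceilometer-0 numbers from the logged timestamps:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matches the timestamp format printed in the records above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-11 03:05:37 +0000 UTC")             // podCreationTimestamp
	running := parse("2025-10-11 03:05:43.763077217 +0000 UTC")   // watchObservedRunningTime
	pullStart := parse("2025-10-11 03:05:38.601082528 +0000 UTC") // firstStartedPulling
	pullEnd := parse("2025-10-11 03:05:42.612969644 +0000 UTC")   // lastFinishedPulling

	e2e := running.Sub(created)         // 6.763077217s == podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // 2.751190101s == podStartSLOduration
	fmt.Println(e2e, slo)
}

For nova-api-0 just above, the two figures coincide (2.746270139s) because no image pull happened: both pull timestamps are the zero time 0001-01-01.
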
Need to start a new one" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.641546 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb\") pod \"42c8ee30-3526-44f7-9745-3a14b3adbe89\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.643775 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc\") pod \"42c8ee30-3526-44f7-9745-3a14b3adbe89\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.643939 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wdnx\" (UniqueName: \"kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx\") pod \"42c8ee30-3526-44f7-9745-3a14b3adbe89\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.644086 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb\") pod \"42c8ee30-3526-44f7-9745-3a14b3adbe89\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.644165 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config\") pod \"42c8ee30-3526-44f7-9745-3a14b3adbe89\" (UID: \"42c8ee30-3526-44f7-9745-3a14b3adbe89\") " Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.649031 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx" (OuterVolumeSpecName: "kube-api-access-8wdnx") pod "42c8ee30-3526-44f7-9745-3a14b3adbe89" (UID: "42c8ee30-3526-44f7-9745-3a14b3adbe89"). InnerVolumeSpecName "kube-api-access-8wdnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.687394 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "42c8ee30-3526-44f7-9745-3a14b3adbe89" (UID: "42c8ee30-3526-44f7-9745-3a14b3adbe89"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.694963 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config" (OuterVolumeSpecName: "config") pod "42c8ee30-3526-44f7-9745-3a14b3adbe89" (UID: "42c8ee30-3526-44f7-9745-3a14b3adbe89"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.695332 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "42c8ee30-3526-44f7-9745-3a14b3adbe89" (UID: "42c8ee30-3526-44f7-9745-3a14b3adbe89"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.701147 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42c8ee30-3526-44f7-9745-3a14b3adbe89" (UID: "42c8ee30-3526-44f7-9745-3a14b3adbe89"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.740497 4953 generic.go:334] "Generic (PLEG): container finished" podID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerID="075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b" exitCode=0 Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.740563 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" event={"ID":"42c8ee30-3526-44f7-9745-3a14b3adbe89","Type":"ContainerDied","Data":"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b"} Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.740591 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" event={"ID":"42c8ee30-3526-44f7-9745-3a14b3adbe89","Type":"ContainerDied","Data":"bca495349c4140502a02126d61b92108e206e9c903cd0adfab60d27a626b5053"} Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.740622 4953 scope.go:117] "RemoveContainer" containerID="075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.740754 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-566b5b7845-rwqsc" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746028 4953 generic.go:334] "Generic (PLEG): container finished" podID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerID="8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba" exitCode=2 Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746060 4953 generic.go:334] "Generic (PLEG): container finished" podID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerID="8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e" exitCode=0 Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746069 4953 generic.go:334] "Generic (PLEG): container finished" podID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerID="cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc" exitCode=0 Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746081 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerDied","Data":"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba"} Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746141 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerDied","Data":"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e"} Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746156 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerDied","Data":"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc"} Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746755 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wdnx\" (UniqueName: 
\"kubernetes.io/projected/42c8ee30-3526-44f7-9745-3a14b3adbe89-kube-api-access-8wdnx\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746800 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746825 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746848 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.746868 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42c8ee30-3526-44f7-9745-3a14b3adbe89-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.775103 4953 scope.go:117] "RemoveContainer" containerID="0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.784705 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"] Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.792872 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-566b5b7845-rwqsc"] Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.798788 4953 scope.go:117] "RemoveContainer" containerID="075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b" Oct 11 03:05:44 crc kubenswrapper[4953]: E1011 03:05:44.799292 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b\": container with ID starting with 075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b not found: ID does not exist" containerID="075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.799344 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b"} err="failed to get container status \"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b\": rpc error: code = NotFound desc = could not find container \"075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b\": container with ID starting with 075e609fc61fb54a965ea83aa0509af257865e6dc54a33d8536304fb855e359b not found: ID does not exist" Oct 11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.799372 4953 scope.go:117] "RemoveContainer" containerID="0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19" Oct 11 03:05:44 crc kubenswrapper[4953]: E1011 03:05:44.799983 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19\": container with ID starting with 0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19 not found: ID does not exist" containerID="0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19" Oct 
11 03:05:44 crc kubenswrapper[4953]: I1011 03:05:44.800025 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19"} err="failed to get container status \"0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19\": rpc error: code = NotFound desc = could not find container \"0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19\": container with ID starting with 0e86533bfa722ab0b1d113e1a8d0eb398461847b21a43932095b1df794dbac19 not found: ID does not exist" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.520552 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562076 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562138 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562267 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rwhv\" (UniqueName: \"kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562353 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562376 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562401 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562456 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562555 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml\") pod \"57d85613-6f80-4ecb-842b-5b7e3af58c7a\" (UID: 
\"57d85613-6f80-4ecb-842b-5b7e3af58c7a\") " Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.562758 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.563013 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.563314 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.566685 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts" (OuterVolumeSpecName: "scripts") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.568408 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv" (OuterVolumeSpecName: "kube-api-access-2rwhv") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "kube-api-access-2rwhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.591330 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.611738 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.630930 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.656508 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data" (OuterVolumeSpecName: "config-data") pod "57d85613-6f80-4ecb-842b-5b7e3af58c7a" (UID: "57d85613-6f80-4ecb-842b-5b7e3af58c7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664762 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664793 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/57d85613-6f80-4ecb-842b-5b7e3af58c7a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664804 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rwhv\" (UniqueName: \"kubernetes.io/projected/57d85613-6f80-4ecb-842b-5b7e3af58c7a-kube-api-access-2rwhv\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664814 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664823 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664834 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.664842 4953 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d85613-6f80-4ecb-842b-5b7e3af58c7a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.766912 4953 generic.go:334] "Generic (PLEG): container finished" podID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerID="cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb" exitCode=0 Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.766951 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerDied","Data":"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb"} Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.766973 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"57d85613-6f80-4ecb-842b-5b7e3af58c7a","Type":"ContainerDied","Data":"6d5d1eb249e0f025382ff3ede9e2f2f384d83bfc5d75f628bd7eea9c79a0b1ed"} Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.766990 4953 scope.go:117] "RemoveContainer" containerID="cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.767042 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.785701 4953 scope.go:117] "RemoveContainer" containerID="8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.802993 4953 scope.go:117] "RemoveContainer" containerID="8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.815186 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" path="/var/lib/kubelet/pods/42c8ee30-3526-44f7-9745-3a14b3adbe89/volumes" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.816173 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.820177 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.829041 4953 scope.go:117] "RemoveContainer" containerID="cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.847730 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848225 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-notification-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848240 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-notification-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848263 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="sg-core" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848271 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="sg-core" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848282 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="proxy-httpd" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848290 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="proxy-httpd" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848311 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="init" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848319 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="init" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848329 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="dnsmasq-dns" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848350 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="dnsmasq-dns" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.848367 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-central-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848375 4953 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-central-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848625 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-central-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848639 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="42c8ee30-3526-44f7-9745-3a14b3adbe89" containerName="dnsmasq-dns" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848664 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="ceilometer-notification-agent" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848688 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="proxy-httpd" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.848698 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" containerName="sg-core" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.850303 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.855171 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.855374 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.855573 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.867897 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.867949 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.867967 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.867990 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.868027 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd\") pod 
\"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.868055 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pghg2\" (UniqueName: \"kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.868072 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.868093 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.875308 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.914765 4953 scope.go:117] "RemoveContainer" containerID="cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.915230 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb\": container with ID starting with cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb not found: ID does not exist" containerID="cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.915279 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb"} err="failed to get container status \"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb\": rpc error: code = NotFound desc = could not find container \"cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb\": container with ID starting with cfa80f50681a0a92b6a7c88f8210f701c3199d6e99dcd797743063e85c7584cb not found: ID does not exist" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.915305 4953 scope.go:117] "RemoveContainer" containerID="8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.915725 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba\": container with ID starting with 8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba not found: ID does not exist" containerID="8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.915761 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba"} err="failed to get container status 
\"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba\": rpc error: code = NotFound desc = could not find container \"8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba\": container with ID starting with 8624ad1d92204ee986a2356aa5d8e435c3a493fc0861ae473a28915fb1a5a4ba not found: ID does not exist" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.915788 4953 scope.go:117] "RemoveContainer" containerID="8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.916201 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e\": container with ID starting with 8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e not found: ID does not exist" containerID="8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.916221 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e"} err="failed to get container status \"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e\": rpc error: code = NotFound desc = could not find container \"8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e\": container with ID starting with 8ddb426a5f156f5c5f761459fbdebe9f52038acaeb516ea8b5a39af03fc7d76e not found: ID does not exist" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.916235 4953 scope.go:117] "RemoveContainer" containerID="cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc" Oct 11 03:05:45 crc kubenswrapper[4953]: E1011 03:05:45.916470 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc\": container with ID starting with cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc not found: ID does not exist" containerID="cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.916501 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc"} err="failed to get container status \"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc\": rpc error: code = NotFound desc = could not find container \"cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc\": container with ID starting with cdbe7322d3adc2ec678be458c444bda3ae483d8806fbc9a7afd99d70910690fc not found: ID does not exist" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969790 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969840 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 
03:05:45.969859 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969878 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969918 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969938 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pghg2\" (UniqueName: \"kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969957 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.969980 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.970419 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.970592 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.973125 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.973878 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.974356 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.976087 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.991643 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:45 crc kubenswrapper[4953]: I1011 03:05:45.994427 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pghg2\" (UniqueName: \"kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2\") pod \"ceilometer-0\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " pod="openstack/ceilometer-0" Oct 11 03:05:46 crc kubenswrapper[4953]: I1011 03:05:46.213219 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:05:46 crc kubenswrapper[4953]: W1011 03:05:46.727222 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice/crio-5fbb9231dc7aefadc180c226cd0e8376f164900f474cb27437ff9de7b63f5883 WatchSource:0}: Error finding container 5fbb9231dc7aefadc180c226cd0e8376f164900f474cb27437ff9de7b63f5883: Status 404 returned error can't find the container with id 5fbb9231dc7aefadc180c226cd0e8376f164900f474cb27437ff9de7b63f5883 Oct 11 03:05:46 crc kubenswrapper[4953]: I1011 03:05:46.729067 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:05:46 crc kubenswrapper[4953]: I1011 03:05:46.775010 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerStarted","Data":"5fbb9231dc7aefadc180c226cd0e8376f164900f474cb27437ff9de7b63f5883"} Oct 11 03:05:47 crc kubenswrapper[4953]: I1011 03:05:47.832010 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57d85613-6f80-4ecb-842b-5b7e3af58c7a" path="/var/lib/kubelet/pods/57d85613-6f80-4ecb-842b-5b7e3af58c7a/volumes" Oct 11 03:05:47 crc kubenswrapper[4953]: I1011 03:05:47.838662 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerStarted","Data":"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179"} Oct 11 03:05:48 crc kubenswrapper[4953]: I1011 03:05:48.011299 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:48 crc kubenswrapper[4953]: I1011 03:05:48.046809 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:48 crc kubenswrapper[4953]: I1011 03:05:48.853490 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerStarted","Data":"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a"} Oct 11 03:05:48 crc kubenswrapper[4953]: I1011 03:05:48.869398 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.037295 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rn257"] Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.041320 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.044686 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.045962 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.050064 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rn257"] Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.057590 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.057688 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.057730 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7cmp\" (UniqueName: \"kubernetes.io/projected/3ea69ea4-ed2c-4527-94de-981b609f1cca-kube-api-access-f7cmp\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.057799 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.159383 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.159470 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " 
pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.159531 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7cmp\" (UniqueName: \"kubernetes.io/projected/3ea69ea4-ed2c-4527-94de-981b609f1cca-kube-api-access-f7cmp\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.159643 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.163474 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.164045 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.167594 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.181698 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7cmp\" (UniqueName: \"kubernetes.io/projected/3ea69ea4-ed2c-4527-94de-981b609f1cca-kube-api-access-f7cmp\") pod \"nova-cell1-cell-mapping-rn257\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") " pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.366258 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.816927 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rn257"] Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.881947 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerStarted","Data":"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830"} Oct 11 03:05:49 crc kubenswrapper[4953]: I1011 03:05:49.885908 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rn257" event={"ID":"3ea69ea4-ed2c-4527-94de-981b609f1cca","Type":"ContainerStarted","Data":"19d27cc66ebb6a7017608d58fba8ae6e5d8f97cdca5c77fcee8ceb63bbffef62"} Oct 11 03:05:50 crc kubenswrapper[4953]: I1011 03:05:50.897643 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerStarted","Data":"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0"} Oct 11 03:05:50 crc kubenswrapper[4953]: I1011 03:05:50.898131 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:05:50 crc kubenswrapper[4953]: I1011 03:05:50.899739 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rn257" event={"ID":"3ea69ea4-ed2c-4527-94de-981b609f1cca","Type":"ContainerStarted","Data":"5ed42581fe9042b94858040ec0efad065ffe12a0f61c84ff6bb546094183dcfb"} Oct 11 03:05:50 crc kubenswrapper[4953]: I1011 03:05:50.964630 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.517081834 podStartE2EDuration="5.964590465s" podCreationTimestamp="2025-10-11 03:05:45 +0000 UTC" firstStartedPulling="2025-10-11 03:05:46.729970276 +0000 UTC m=+1157.663057920" lastFinishedPulling="2025-10-11 03:05:50.177478907 +0000 UTC m=+1161.110566551" observedRunningTime="2025-10-11 03:05:50.95220635 +0000 UTC m=+1161.885293984" watchObservedRunningTime="2025-10-11 03:05:50.964590465 +0000 UTC m=+1161.897678109" Oct 11 03:05:50 crc kubenswrapper[4953]: I1011 03:05:50.981976 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rn257" podStartSLOduration=1.981949067 podStartE2EDuration="1.981949067s" podCreationTimestamp="2025-10-11 03:05:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:05:50.971080451 +0000 UTC m=+1161.904168095" watchObservedRunningTime="2025-10-11 03:05:50.981949067 +0000 UTC m=+1161.915036711" Oct 11 03:05:51 crc kubenswrapper[4953]: I1011 03:05:51.134305 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:05:51 crc kubenswrapper[4953]: I1011 03:05:51.134400 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:05:52 crc kubenswrapper[4953]: I1011 03:05:52.144159 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:05:52 crc kubenswrapper[4953]: I1011 
03:05:52.152917 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 11 03:05:54 crc kubenswrapper[4953]: I1011 03:05:54.948267 4953 generic.go:334] "Generic (PLEG): container finished" podID="3ea69ea4-ed2c-4527-94de-981b609f1cca" containerID="5ed42581fe9042b94858040ec0efad065ffe12a0f61c84ff6bb546094183dcfb" exitCode=0
Oct 11 03:05:54 crc kubenswrapper[4953]: I1011 03:05:54.948345 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rn257" event={"ID":"3ea69ea4-ed2c-4527-94de-981b609f1cca","Type":"ContainerDied","Data":"5ed42581fe9042b94858040ec0efad065ffe12a0f61c84ff6bb546094183dcfb"}
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.308983 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rn257"
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.422832 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts\") pod \"3ea69ea4-ed2c-4527-94de-981b609f1cca\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") "
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.422976 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle\") pod \"3ea69ea4-ed2c-4527-94de-981b609f1cca\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") "
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.423002 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7cmp\" (UniqueName: \"kubernetes.io/projected/3ea69ea4-ed2c-4527-94de-981b609f1cca-kube-api-access-f7cmp\") pod \"3ea69ea4-ed2c-4527-94de-981b609f1cca\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") "
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.423101 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data\") pod \"3ea69ea4-ed2c-4527-94de-981b609f1cca\" (UID: \"3ea69ea4-ed2c-4527-94de-981b609f1cca\") "
Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.429572 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts" (OuterVolumeSpecName: "scripts") pod "3ea69ea4-ed2c-4527-94de-981b609f1cca" (UID: "3ea69ea4-ed2c-4527-94de-981b609f1cca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
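The Startup probe failures above report "Client.Timeout exceeded while awaiting headers": the probe's HTTP client gave up within the probe's timeout while nova-api was still warming up. A sketch of a probe consistent with those entries follows; the endpoint comes from the log, but the numeric thresholds are assumptions, since the log does not record the pod's actual probe settings.

```go
package sketch

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// startupProbe sketches a startup probe matching the prober.go failures
// above: an HTTPS GET against port 8774 that is marked unhealthy when
// the request exceeds TimeoutSeconds.
func startupProbe() *corev1.Probe {
	return &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path:   "/",
				Port:   intstr.FromInt(8774),
				Scheme: corev1.URISchemeHTTPS,
			},
		},
		TimeoutSeconds:   3,  // exceeded -> "Client.Timeout exceeded while awaiting headers" (value assumed)
		PeriodSeconds:    10, // retried until FailureThreshold is exhausted (value assumed)
		FailureThreshold: 30, // value assumed
	}
}
```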
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.450701 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data" (OuterVolumeSpecName: "config-data") pod "3ea69ea4-ed2c-4527-94de-981b609f1cca" (UID: "3ea69ea4-ed2c-4527-94de-981b609f1cca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.462947 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ea69ea4-ed2c-4527-94de-981b609f1cca" (UID: "3ea69ea4-ed2c-4527-94de-981b609f1cca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.525943 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.525976 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.525990 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ea69ea4-ed2c-4527-94de-981b609f1cca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.526005 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7cmp\" (UniqueName: \"kubernetes.io/projected/3ea69ea4-ed2c-4527-94de-981b609f1cca-kube-api-access-f7cmp\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.977491 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rn257" event={"ID":"3ea69ea4-ed2c-4527-94de-981b609f1cca","Type":"ContainerDied","Data":"19d27cc66ebb6a7017608d58fba8ae6e5d8f97cdca5c77fcee8ceb63bbffef62"} Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.977544 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19d27cc66ebb6a7017608d58fba8ae6e5d8f97cdca5c77fcee8ceb63bbffef62" Oct 11 03:05:56 crc kubenswrapper[4953]: I1011 03:05:56.977768 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rn257" Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.169363 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.170024 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-log" containerID="cri-o://a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb" gracePeriod=30 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.170144 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-api" containerID="cri-o://d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb" gracePeriod=30 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.203884 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.204461 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a9131dd4-ea0c-401c-ac62-20df5d458956" containerName="nova-scheduler-scheduler" containerID="cri-o://c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335" gracePeriod=30 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.219228 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.219477 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" containerID="cri-o://7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35" gracePeriod=30 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.219608 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" containerID="cri-o://ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769" gracePeriod=30 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.989139 4953 generic.go:334] "Generic (PLEG): container finished" podID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerID="7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35" exitCode=143 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.989220 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerDied","Data":"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35"} Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.991924 4953 generic.go:334] "Generic (PLEG): container finished" podID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerID="a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb" exitCode=143 Oct 11 03:05:57 crc kubenswrapper[4953]: I1011 03:05:57.992008 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerDied","Data":"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb"} Oct 11 03:05:58 crc kubenswrapper[4953]: I1011 03:05:58.881037 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.001743 4953 generic.go:334] "Generic (PLEG): container finished" podID="a9131dd4-ea0c-401c-ac62-20df5d458956" containerID="c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335" exitCode=0 Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.001784 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9131dd4-ea0c-401c-ac62-20df5d458956","Type":"ContainerDied","Data":"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335"} Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.001809 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a9131dd4-ea0c-401c-ac62-20df5d458956","Type":"ContainerDied","Data":"e203d7f7423d362eee19ffd8ea2c67b637b1a9ed6caf47e08423fa565c82f3e1"} Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.001825 4953 scope.go:117] "RemoveContainer" containerID="c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.001937 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.025288 4953 scope.go:117] "RemoveContainer" containerID="c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335" Oct 11 03:05:59 crc kubenswrapper[4953]: E1011 03:05:59.025803 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335\": container with ID starting with c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335 not found: ID does not exist" containerID="c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.025928 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335"} err="failed to get container status \"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335\": rpc error: code = NotFound desc = could not find container \"c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335\": container with ID starting with c43f81f0232789d0456d506c801ed9d126c073b133f0d03c68d96f460ddb5335 not found: ID does not exist" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.076802 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle\") pod \"a9131dd4-ea0c-401c-ac62-20df5d458956\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.077306 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h889n\" (UniqueName: \"kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n\") pod \"a9131dd4-ea0c-401c-ac62-20df5d458956\" (UID: \"a9131dd4-ea0c-401c-ac62-20df5d458956\") " Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.077498 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data\") pod \"a9131dd4-ea0c-401c-ac62-20df5d458956\" (UID: 
\"a9131dd4-ea0c-401c-ac62-20df5d458956\") " Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.085692 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n" (OuterVolumeSpecName: "kube-api-access-h889n") pod "a9131dd4-ea0c-401c-ac62-20df5d458956" (UID: "a9131dd4-ea0c-401c-ac62-20df5d458956"). InnerVolumeSpecName "kube-api-access-h889n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.111478 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data" (OuterVolumeSpecName: "config-data") pod "a9131dd4-ea0c-401c-ac62-20df5d458956" (UID: "a9131dd4-ea0c-401c-ac62-20df5d458956"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.115041 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9131dd4-ea0c-401c-ac62-20df5d458956" (UID: "a9131dd4-ea0c-401c-ac62-20df5d458956"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.179568 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.179646 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h889n\" (UniqueName: \"kubernetes.io/projected/a9131dd4-ea0c-401c-ac62-20df5d458956-kube-api-access-h889n\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.179662 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9131dd4-ea0c-401c-ac62-20df5d458956-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.334438 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.341947 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.364813 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:59 crc kubenswrapper[4953]: E1011 03:05:59.365995 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9131dd4-ea0c-401c-ac62-20df5d458956" containerName="nova-scheduler-scheduler" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.366023 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9131dd4-ea0c-401c-ac62-20df5d458956" containerName="nova-scheduler-scheduler" Oct 11 03:05:59 crc kubenswrapper[4953]: E1011 03:05:59.366061 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ea69ea4-ed2c-4527-94de-981b609f1cca" containerName="nova-manage" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.366269 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ea69ea4-ed2c-4527-94de-981b609f1cca" containerName="nova-manage" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.367004 4953 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="3ea69ea4-ed2c-4527-94de-981b609f1cca" containerName="nova-manage" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.367087 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9131dd4-ea0c-401c-ac62-20df5d458956" containerName="nova-scheduler-scheduler" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.368722 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.372723 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.375493 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.388033 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.388145 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgdzn\" (UniqueName: \"kubernetes.io/projected/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-kube-api-access-cgdzn\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.388187 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-config-data\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.489564 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.489707 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgdzn\" (UniqueName: \"kubernetes.io/projected/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-kube-api-access-cgdzn\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.489750 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-config-data\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.493437 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.494124 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-config-data\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.508865 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgdzn\" (UniqueName: \"kubernetes.io/projected/f3ec169a-6416-41d0-bb28-1d14dd2e7dc6-kube-api-access-cgdzn\") pod \"nova-scheduler-0\" (UID: \"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6\") " pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.696827 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 11 03:05:59 crc kubenswrapper[4953]: I1011 03:05:59.810679 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9131dd4-ea0c-401c-ac62-20df5d458956" path="/var/lib/kubelet/pods/a9131dd4-ea0c-401c-ac62-20df5d458956/volumes" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.144274 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.351588 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.180:8775/\": read tcp 10.217.0.2:53790->10.217.0.180:8775: read: connection reset by peer" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.351607 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.180:8775/\": read tcp 10.217.0.2:53800->10.217.0.180:8775: read: connection reset by peer" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.767460 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.865985 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915537 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915607 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915655 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915680 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915755 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v269r\" (UniqueName: \"kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.915851 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs\") pod \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\" (UID: \"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80\") " Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.918929 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs" (OuterVolumeSpecName: "logs") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.925235 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r" (OuterVolumeSpecName: "kube-api-access-v269r") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "kube-api-access-v269r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.948323 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data" (OuterVolumeSpecName: "config-data") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.956522 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:00 crc kubenswrapper[4953]: I1011 03:06:00.976114 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.003873 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" (UID: "6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017152 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data\") pod \"79df2c0c-e0aa-4999-a2be-72941b080ce1\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017247 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle\") pod \"79df2c0c-e0aa-4999-a2be-72941b080ce1\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017329 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs\") pod \"79df2c0c-e0aa-4999-a2be-72941b080ce1\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017399 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs\") pod \"79df2c0c-e0aa-4999-a2be-72941b080ce1\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017440 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmlbm\" (UniqueName: \"kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm\") pod \"79df2c0c-e0aa-4999-a2be-72941b080ce1\" (UID: \"79df2c0c-e0aa-4999-a2be-72941b080ce1\") " Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.017984 4953 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.018001 4953 reconciler_common.go:293] "Volume detached for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.018016 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.018026 4953 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.018037 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v269r\" (UniqueName: \"kubernetes.io/projected/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-kube-api-access-v269r\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.018050 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.019860 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs" (OuterVolumeSpecName: "logs") pod "79df2c0c-e0aa-4999-a2be-72941b080ce1" (UID: "79df2c0c-e0aa-4999-a2be-72941b080ce1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.023852 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm" (OuterVolumeSpecName: "kube-api-access-jmlbm") pod "79df2c0c-e0aa-4999-a2be-72941b080ce1" (UID: "79df2c0c-e0aa-4999-a2be-72941b080ce1"). InnerVolumeSpecName "kube-api-access-jmlbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.026657 4953 generic.go:334] "Generic (PLEG): container finished" podID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerID="ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769" exitCode=0 Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.026725 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerDied","Data":"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.026756 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"79df2c0c-e0aa-4999-a2be-72941b080ce1","Type":"ContainerDied","Data":"aff45df6f8166aeed49ef41ec4d7035a5be62b8c289f3cce75d536da14efa827"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.026776 4953 scope.go:117] "RemoveContainer" containerID="ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.026884 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.035078 4953 generic.go:334] "Generic (PLEG): container finished" podID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerID="d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb" exitCode=0 Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.035197 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerDied","Data":"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.035227 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80","Type":"ContainerDied","Data":"47de1115e022d89894156f5094534476ba628c3188a064cbbd5c3e71a2e3bf49"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.035290 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.042658 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6","Type":"ContainerStarted","Data":"d647f0fecc509fe6795b115cd2cc0090abaade068a212d6034cd4209a9d364a4"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.042719 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f3ec169a-6416-41d0-bb28-1d14dd2e7dc6","Type":"ContainerStarted","Data":"ed0ad6f1afc7a991e59efbb95a2a1c7281cbde2d38b114672818e1376b0cf252"} Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.046270 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79df2c0c-e0aa-4999-a2be-72941b080ce1" (UID: "79df2c0c-e0aa-4999-a2be-72941b080ce1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.063029 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data" (OuterVolumeSpecName: "config-data") pod "79df2c0c-e0aa-4999-a2be-72941b080ce1" (UID: "79df2c0c-e0aa-4999-a2be-72941b080ce1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.070680 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.070652558 podStartE2EDuration="2.070652558s" podCreationTimestamp="2025-10-11 03:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:06:01.057961803 +0000 UTC m=+1171.991049457" watchObservedRunningTime="2025-10-11 03:06:01.070652558 +0000 UTC m=+1172.003740222" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.071564 4953 scope.go:117] "RemoveContainer" containerID="7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.084858 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.093291 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.098099 4953 scope.go:117] "RemoveContainer" containerID="ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.100393 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769\": container with ID starting with ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769 not found: ID does not exist" containerID="ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.100426 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769"} err="failed to get container status \"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769\": rpc error: code = NotFound desc = could not find container \"ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769\": container with ID starting with ef72625d0cefbe72c5791f9b7c60444835650620352774c17536ec18957a6769 not found: ID does not exist" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.100446 4953 scope.go:117] "RemoveContainer" containerID="7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.100855 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35\": container with ID starting with 7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35 not found: ID does not exist" containerID="7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.100912 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35"} err="failed to get container status \"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35\": rpc error: code = NotFound desc = could not find container \"7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35\": container with ID starting with 7309b098b984869d5a73afa6856414707156831bcf81822e299af5d857dd5b35 not found: ID does not 
exist" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.100969 4953 scope.go:117] "RemoveContainer" containerID="d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101147 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.101488 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101506 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.101524 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101531 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.101544 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-api" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101550 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-api" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.101560 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-log" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101565 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-log" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101786 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-log" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101803 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" containerName="nova-metadata-metadata" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101818 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-log" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.101832 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" containerName="nova-api-api" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.102666 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.103044 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "79df2c0c-e0aa-4999-a2be-72941b080ce1" (UID: "79df2c0c-e0aa-4999-a2be-72941b080ce1"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.106235 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.106831 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.107053 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.115596 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.133916 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-public-tls-certs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134037 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134061 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvhfg\" (UniqueName: \"kubernetes.io/projected/275ac8e9-c059-44b2-814d-8e435b228b94-kube-api-access-bvhfg\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134085 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-config-data\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134180 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-internal-tls-certs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134199 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/275ac8e9-c059-44b2-814d-8e435b228b94-logs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134243 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134254 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134267 4953 reconciler_common.go:293] 
"Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79df2c0c-e0aa-4999-a2be-72941b080ce1-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134276 4953 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/79df2c0c-e0aa-4999-a2be-72941b080ce1-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.134285 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmlbm\" (UniqueName: \"kubernetes.io/projected/79df2c0c-e0aa-4999-a2be-72941b080ce1-kube-api-access-jmlbm\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.139888 4953 scope.go:117] "RemoveContainer" containerID="a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.172794 4953 scope.go:117] "RemoveContainer" containerID="d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.174344 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb\": container with ID starting with d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb not found: ID does not exist" containerID="d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.174376 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb"} err="failed to get container status \"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb\": rpc error: code = NotFound desc = could not find container \"d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb\": container with ID starting with d5ab4f37e293c4be2565e64ed0fbf175b038d3f1f3a5c073f202c0be613d97bb not found: ID does not exist" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.174397 4953 scope.go:117] "RemoveContainer" containerID="a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb" Oct 11 03:06:01 crc kubenswrapper[4953]: E1011 03:06:01.174878 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb\": container with ID starting with a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb not found: ID does not exist" containerID="a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.174902 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb"} err="failed to get container status \"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb\": rpc error: code = NotFound desc = could not find container \"a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb\": container with ID starting with a1ee48f19bc64499666149142998ebbc6960254e7a40788bc009c916048b2cbb not found: ID does not exist" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238485 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238543 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvhfg\" (UniqueName: \"kubernetes.io/projected/275ac8e9-c059-44b2-814d-8e435b228b94-kube-api-access-bvhfg\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238579 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-config-data\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238695 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-internal-tls-certs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238725 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/275ac8e9-c059-44b2-814d-8e435b228b94-logs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.238761 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-public-tls-certs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.239863 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/275ac8e9-c059-44b2-814d-8e435b228b94-logs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.242834 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-public-tls-certs\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.243655 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.243703 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-config-data\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.244079 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/275ac8e9-c059-44b2-814d-8e435b228b94-internal-tls-certs\") pod 
\"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.257809 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvhfg\" (UniqueName: \"kubernetes.io/projected/275ac8e9-c059-44b2-814d-8e435b228b94-kube-api-access-bvhfg\") pod \"nova-api-0\" (UID: \"275ac8e9-c059-44b2-814d-8e435b228b94\") " pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.360549 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.367611 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.385411 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.387447 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.390293 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.390452 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.396376 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.440946 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.442271 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq4fm\" (UniqueName: \"kubernetes.io/projected/afbe51ae-4d91-446a-b027-806a760887e0-kube-api-access-wq4fm\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.442405 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.442503 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afbe51ae-4d91-446a-b027-806a760887e0-logs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.442693 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-config-data\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.442802 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.550179 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.550874 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afbe51ae-4d91-446a-b027-806a760887e0-logs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.550934 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-config-data\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.550958 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.551022 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq4fm\" (UniqueName: \"kubernetes.io/projected/afbe51ae-4d91-446a-b027-806a760887e0-kube-api-access-wq4fm\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.553843 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afbe51ae-4d91-446a-b027-806a760887e0-logs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.561695 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.580369 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq4fm\" (UniqueName: \"kubernetes.io/projected/afbe51ae-4d91-446a-b027-806a760887e0-kube-api-access-wq4fm\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.585694 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-config-data\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.585744 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/afbe51ae-4d91-446a-b027-806a760887e0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"afbe51ae-4d91-446a-b027-806a760887e0\") " pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.757958 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.807902 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80" path="/var/lib/kubelet/pods/6a6e4fef-1ea6-49ad-bc9b-5a0a7f452a80/volumes" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.810252 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79df2c0c-e0aa-4999-a2be-72941b080ce1" path="/var/lib/kubelet/pods/79df2c0c-e0aa-4999-a2be-72941b080ce1/volumes" Oct 11 03:06:01 crc kubenswrapper[4953]: I1011 03:06:01.917020 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 11 03:06:01 crc kubenswrapper[4953]: W1011 03:06:01.918327 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod275ac8e9_c059_44b2_814d_8e435b228b94.slice/crio-f4f3bfa7198e9cedec1e132d45c8ec4056e2d074b542dcb5235e5aeee9e979cb WatchSource:0}: Error finding container f4f3bfa7198e9cedec1e132d45c8ec4056e2d074b542dcb5235e5aeee9e979cb: Status 404 returned error can't find the container with id f4f3bfa7198e9cedec1e132d45c8ec4056e2d074b542dcb5235e5aeee9e979cb Oct 11 03:06:02 crc kubenswrapper[4953]: I1011 03:06:02.058525 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"275ac8e9-c059-44b2-814d-8e435b228b94","Type":"ContainerStarted","Data":"f4f3bfa7198e9cedec1e132d45c8ec4056e2d074b542dcb5235e5aeee9e979cb"} Oct 11 03:06:02 crc kubenswrapper[4953]: I1011 03:06:02.207978 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 11 03:06:02 crc kubenswrapper[4953]: W1011 03:06:02.221270 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafbe51ae_4d91_446a_b027_806a760887e0.slice/crio-7faba7200ba9ad8aa9991dd94a7f6fe12df29ff74fcbb294422ecdd4f804b0a4 WatchSource:0}: Error finding container 7faba7200ba9ad8aa9991dd94a7f6fe12df29ff74fcbb294422ecdd4f804b0a4: Status 404 returned error can't find the container with id 7faba7200ba9ad8aa9991dd94a7f6fe12df29ff74fcbb294422ecdd4f804b0a4 Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.070702 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"275ac8e9-c059-44b2-814d-8e435b228b94","Type":"ContainerStarted","Data":"6d600a2fc0575cbb57cb131f9b14155c4bf6b7a034c05449f547848b1eecf6c1"} Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.071037 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"275ac8e9-c059-44b2-814d-8e435b228b94","Type":"ContainerStarted","Data":"3293e2d3e41f7bf7592fd1369680daaed2953627f71580e03f3e34360f684923"} Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.072247 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"afbe51ae-4d91-446a-b027-806a760887e0","Type":"ContainerStarted","Data":"5a67e39a27670fad06f38a3653680ddeee0e12b3ee21578f7492a182e32d1b77"} Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.072271 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"afbe51ae-4d91-446a-b027-806a760887e0","Type":"ContainerStarted","Data":"8823dc244af267e03a1ceecfda70bbab06e468bde8617b4548fd2bb87989e49d"} Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.072281 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"afbe51ae-4d91-446a-b027-806a760887e0","Type":"ContainerStarted","Data":"7faba7200ba9ad8aa9991dd94a7f6fe12df29ff74fcbb294422ecdd4f804b0a4"} Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.097817 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.097793109 podStartE2EDuration="2.097793109s" podCreationTimestamp="2025-10-11 03:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:06:03.089905313 +0000 UTC m=+1174.022992977" watchObservedRunningTime="2025-10-11 03:06:03.097793109 +0000 UTC m=+1174.030880763" Oct 11 03:06:03 crc kubenswrapper[4953]: I1011 03:06:03.131321 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.131300451 podStartE2EDuration="2.131300451s" podCreationTimestamp="2025-10-11 03:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:06:03.122756719 +0000 UTC m=+1174.055844383" watchObservedRunningTime="2025-10-11 03:06:03.131300451 +0000 UTC m=+1174.064388105" Oct 11 03:06:04 crc kubenswrapper[4953]: I1011 03:06:04.697132 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 11 03:06:06 crc kubenswrapper[4953]: I1011 03:06:06.758245 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:06:06 crc kubenswrapper[4953]: I1011 03:06:06.758304 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 11 03:06:09 crc kubenswrapper[4953]: I1011 03:06:09.697004 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 11 03:06:09 crc kubenswrapper[4953]: I1011 03:06:09.751201 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 11 03:06:10 crc kubenswrapper[4953]: I1011 03:06:10.181943 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.316325 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.316433 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.316514 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.319716 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.319852 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a" gracePeriod=600 Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.441963 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.442475 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.759574 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 03:06:11 crc kubenswrapper[4953]: I1011 03:06:11.759942 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.174112 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a" exitCode=0 Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.174324 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a"} Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.174542 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce"} Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.174571 4953 scope.go:117] "RemoveContainer" containerID="7c2039bb1b5e9ebe18e8fb7946b709aaacc565fd9ec85d3707d854610d50c294" Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.459803 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="275ac8e9-c059-44b2-814d-8e435b228b94" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.192:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.459833 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="275ac8e9-c059-44b2-814d-8e435b228b94" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.192:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.779978 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="afbe51ae-4d91-446a-b027-806a760887e0" 
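[Editor's note] The Startup probe failures above all end the same way: the probe's HTTP client gave up before the service wrote response headers, which is expected while nova-api/nova-metadata are still booting. A small hedged Go demo of how that error class arises (this is not the kubelet prober itself, and the exact error wording varies by Go version; the addresses and timeout below are arbitrary):

package main

import (
	"fmt"
	"net"
	"net/http"
	"time"
)

func main() {
	// A "silent" server: accepts TCP connections but never writes an HTTP response.
	ln, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		panic(err)
	}
	defer ln.Close()
	go func() {
		for {
			if _, err := ln.Accept(); err != nil {
				return
			}
		}
	}()

	// A client timeout that fires while still awaiting headers produces the
	// "(Client.Timeout exceeded while awaiting headers)" suffix seen in the log.
	client := &http.Client{Timeout: 500 * time.Millisecond}
	_, err = client.Get("http://" + ln.Addr().String() + "/")
	fmt.Println(err)
}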
containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:06:12 crc kubenswrapper[4953]: I1011 03:06:12.780374 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="afbe51ae-4d91-446a-b027-806a760887e0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 11 03:06:16 crc kubenswrapper[4953]: I1011 03:06:16.223173 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.451598 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.453573 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.454121 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.454176 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.463977 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.467591 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.763110 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.765946 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 11 03:06:21 crc kubenswrapper[4953]: I1011 03:06:21.769765 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 03:06:22 crc kubenswrapper[4953]: I1011 03:06:22.280121 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 11 03:06:30 crc kubenswrapper[4953]: I1011 03:06:30.823154 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:31 crc kubenswrapper[4953]: I1011 03:06:31.511437 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:35 crc kubenswrapper[4953]: I1011 03:06:35.061947 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="rabbitmq" containerID="cri-o://1da1315d18c0c92a907d4916fa378294d9ee5d32187162148948337cb623bda6" gracePeriod=604796 Oct 11 03:06:35 crc kubenswrapper[4953]: I1011 03:06:35.509789 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="rabbitmq" containerID="cri-o://10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276" gracePeriod=604797 Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.472278 4953 generic.go:334] "Generic (PLEG): container finished" podID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" 
containerID="1da1315d18c0c92a907d4916fa378294d9ee5d32187162148948337cb623bda6" exitCode=0 Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.472344 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerDied","Data":"1da1315d18c0c92a907d4916fa378294d9ee5d32187162148948337cb623bda6"} Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.762541 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776087 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776170 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776221 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776341 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776500 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776565 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776666 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776675 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776779 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776817 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776862 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.776918 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.777008 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmtnr\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr\") pod \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\" (UID: \"fee3eac1-8d2e-4182-a666-d9d15aaccd23\") " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.777331 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.777902 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.777961 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.777989 4953 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.830240 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info" (OuterVolumeSpecName: "pod-info") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). 
InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.844803 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.848333 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.851983 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.858782 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr" (OuterVolumeSpecName: "kube-api-access-nmtnr") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "kube-api-access-nmtnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.871546 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data" (OuterVolumeSpecName: "config-data") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.877075 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf" (OuterVolumeSpecName: "server-conf") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881136 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881171 4953 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fee3eac1-8d2e-4182-a666-d9d15aaccd23-pod-info\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881182 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881194 4953 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fee3eac1-8d2e-4182-a666-d9d15aaccd23-server-conf\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881202 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881214 4953 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fee3eac1-8d2e-4182-a666-d9d15aaccd23-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.881223 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmtnr\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-kube-api-access-nmtnr\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.909448 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.933870 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "fee3eac1-8d2e-4182-a666-d9d15aaccd23" (UID: "fee3eac1-8d2e-4182-a666-d9d15aaccd23"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.983220 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:41 crc kubenswrapper[4953]: I1011 03:06:41.983263 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fee3eac1-8d2e-4182-a666-d9d15aaccd23-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.069415 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084249 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084297 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084340 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmd6b\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084362 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084399 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084428 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084446 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084485 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084513 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084532 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: 
\"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.084569 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info\") pod \"54677831-1449-4579-8948-fbf874123d6b\" (UID: \"54677831-1449-4579-8948-fbf874123d6b\") " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.085494 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.085514 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.086161 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.103696 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.103718 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.103782 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.103891 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b" (OuterVolumeSpecName: "kube-api-access-mmd6b") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "kube-api-access-mmd6b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.110247 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info" (OuterVolumeSpecName: "pod-info") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.135301 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data" (OuterVolumeSpecName: "config-data") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.162364 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf" (OuterVolumeSpecName: "server-conf") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186400 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmd6b\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-kube-api-access-mmd6b\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186452 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186489 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186502 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186512 4953 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/54677831-1449-4579-8948-fbf874123d6b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186521 4953 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186532 4953 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-server-conf\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186541 4953 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/54677831-1449-4579-8948-fbf874123d6b-pod-info\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186550 4953 
reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.186560 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54677831-1449-4579-8948-fbf874123d6b-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.206195 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.207659 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "54677831-1449-4579-8948-fbf874123d6b" (UID: "54677831-1449-4579-8948-fbf874123d6b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.287564 4953 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/54677831-1449-4579-8948-fbf874123d6b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.287592 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.483661 4953 generic.go:334] "Generic (PLEG): container finished" podID="54677831-1449-4579-8948-fbf874123d6b" containerID="10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276" exitCode=0 Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.483716 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.483730 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerDied","Data":"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276"} Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.483862 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"54677831-1449-4579-8948-fbf874123d6b","Type":"ContainerDied","Data":"d2647cbbfda343516adaf3426ee1bd6c4bfb038a780f41c19fdb50b8bdd67511"} Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.483880 4953 scope.go:117] "RemoveContainer" containerID="10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.487292 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"fee3eac1-8d2e-4182-a666-d9d15aaccd23","Type":"ContainerDied","Data":"ed9fb61ba2a6958c9b398cf92f5ba4db625ae17424f2b387102ed9f0be6f16aa"} Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.487312 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.526308 4953 scope.go:117] "RemoveContainer" containerID="dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.530034 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.550687 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.550759 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.554222 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.562773 4953 scope.go:117] "RemoveContainer" containerID="10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276" Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.566644 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276\": container with ID starting with 10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276 not found: ID does not exist" containerID="10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.566695 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276"} err="failed to get container status \"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276\": rpc error: code = NotFound desc = could not find container \"10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276\": container with ID starting with 10462da687be45ca6e8bd1147fa952778828dcbf6e81fb649eb6054ae9eee276 not found: ID does not exist" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.566733 4953 scope.go:117] "RemoveContainer" containerID="dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734" Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.567655 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734\": container with ID starting with dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734 not found: ID does not exist" containerID="dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.567708 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734"} err="failed to get container status \"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734\": rpc error: code = NotFound desc = could not find container \"dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734\": container with ID starting with dcee5fb390c077a638f4a35a4a6206ae488f7d44a63bdc93fd7f03d955d1c734 not found: ID does not exist" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.567738 4953 scope.go:117] "RemoveContainer" containerID="1da1315d18c0c92a907d4916fa378294d9ee5d32187162148948337cb623bda6" Oct 11 03:06:42 crc 
kubenswrapper[4953]: I1011 03:06:42.589232 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.591416 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="setup-container" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.591453 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="setup-container" Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.591490 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.591504 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.591554 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="setup-container" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.591572 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="setup-container" Oct 11 03:06:42 crc kubenswrapper[4953]: E1011 03:06:42.591648 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.591660 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.593037 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="54677831-1449-4579-8948-fbf874123d6b" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.593119 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" containerName="rabbitmq" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.595078 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.603366 4953 scope.go:117] "RemoveContainer" containerID="d6d046c528c7f57df75d9528dedfb6f98dc3d824395bda9180e62ca01e3c4d2d" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615264 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615369 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615512 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615620 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-pr2k2" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615766 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.615797 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.616127 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.629114 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.631887 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.639065 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.639308 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.639440 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.641155 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.641284 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.641431 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-b8hzt" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.641521 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.667452 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.685910 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715216 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-tls\") pod 
\"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715265 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-config-data\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715329 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715351 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/712b86c6-78ab-4ce9-96b7-4a627619f79b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715369 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715443 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715478 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715500 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715534 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/712b86c6-78ab-4ce9-96b7-4a627619f79b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715558 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " 
pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.715575 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srtf4\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-kube-api-access-srtf4\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.817828 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818332 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818355 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818394 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dm6m\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-kube-api-access-2dm6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/712b86c6-78ab-4ce9-96b7-4a627619f79b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818442 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818469 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srtf4\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-kube-api-access-srtf4\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818506 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818533 4953 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818558 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-config-data\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818563 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.818584 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819482 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819525 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819640 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819685 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819753 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819813 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819844 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/712b86c6-78ab-4ce9-96b7-4a627619f79b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819867 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.819907 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.820251 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.820930 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/712b86c6-78ab-4ce9-96b7-4a627619f79b-config-data\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.821129 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.821258 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.821304 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.821341 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 
crc kubenswrapper[4953]: I1011 03:06:42.821362 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.825857 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/712b86c6-78ab-4ce9-96b7-4a627619f79b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.826051 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.827004 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/712b86c6-78ab-4ce9-96b7-4a627619f79b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.828856 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.840675 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srtf4\" (UniqueName: \"kubernetes.io/projected/712b86c6-78ab-4ce9-96b7-4a627619f79b-kube-api-access-srtf4\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.854014 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"712b86c6-78ab-4ce9-96b7-4a627619f79b\") " pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922442 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922597 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922644 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-confd\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922716 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922831 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922891 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922939 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.922975 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.923063 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dm6m\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-kube-api-access-2dm6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.923133 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.923192 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.925687 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc 
kubenswrapper[4953]: I1011 03:06:42.926226 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.927189 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.927718 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.928278 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.928361 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.928689 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.929155 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.930813 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.931916 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.948622 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dm6m\" (UniqueName: 
\"kubernetes.io/projected/7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e-kube-api-access-2dm6m\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.954563 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e\") " pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.972475 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 11 03:06:42 crc kubenswrapper[4953]: I1011 03:06:42.990211 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.440446 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 11 03:06:43 crc kubenswrapper[4953]: W1011 03:06:43.443033 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7740b4f1_3fe0_4b7b_9bf4_f7625ed8090e.slice/crio-f67698aa225cb1ffb1eeafad5a1d8ef72b4b2c96438856a59d239539ad5a00ec WatchSource:0}: Error finding container f67698aa225cb1ffb1eeafad5a1d8ef72b4b2c96438856a59d239539ad5a00ec: Status 404 returned error can't find the container with id f67698aa225cb1ffb1eeafad5a1d8ef72b4b2c96438856a59d239539ad5a00ec Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.446870 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.536118 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e","Type":"ContainerStarted","Data":"f67698aa225cb1ffb1eeafad5a1d8ef72b4b2c96438856a59d239539ad5a00ec"} Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.544948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"712b86c6-78ab-4ce9-96b7-4a627619f79b","Type":"ContainerStarted","Data":"15cd2e6b285e410029c4976230cb03aaf90b36ed56f4d2ee3de6a36f83d6e706"} Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.804935 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54677831-1449-4579-8948-fbf874123d6b" path="/var/lib/kubelet/pods/54677831-1449-4579-8948-fbf874123d6b/volumes" Oct 11 03:06:43 crc kubenswrapper[4953]: I1011 03:06:43.806286 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fee3eac1-8d2e-4182-a666-d9d15aaccd23" path="/var/lib/kubelet/pods/fee3eac1-8d2e-4182-a666-d9d15aaccd23/volumes" Oct 11 03:06:45 crc kubenswrapper[4953]: I1011 03:06:45.561785 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"712b86c6-78ab-4ce9-96b7-4a627619f79b","Type":"ContainerStarted","Data":"056bb918f1bb2c7e74fac77cff313bf0d5aac908b1cc39496ff300ecb6891d59"} Oct 11 03:06:45 crc kubenswrapper[4953]: I1011 03:06:45.563365 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e","Type":"ContainerStarted","Data":"ca5ab8f3787a099375c712863b85af8568cd68e938995db8d66c67901e6e56f3"} Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.122294 
4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.126532 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.132630 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.170954 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292217 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292308 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292363 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292472 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292523 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.292554 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9xtv\" (UniqueName: \"kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394492 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394591 4953 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394650 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394685 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9xtv\" (UniqueName: \"kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394725 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.394804 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.395526 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.395688 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.395832 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.395893 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.396525 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.418827 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9xtv\" (UniqueName: \"kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv\") pod \"dnsmasq-dns-6447ccbd8f-fn5nn\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.451499 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:46 crc kubenswrapper[4953]: I1011 03:06:46.949330 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:06:47 crc kubenswrapper[4953]: I1011 03:06:47.583409 4953 generic.go:334] "Generic (PLEG): container finished" podID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerID="378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937" exitCode=0 Oct 11 03:06:47 crc kubenswrapper[4953]: I1011 03:06:47.583459 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" event={"ID":"63b0acf8-c59d-447e-9455-ed176ab5b5e6","Type":"ContainerDied","Data":"378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937"} Oct 11 03:06:47 crc kubenswrapper[4953]: I1011 03:06:47.583489 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" event={"ID":"63b0acf8-c59d-447e-9455-ed176ab5b5e6","Type":"ContainerStarted","Data":"5fb9f68929cf9b23ff1e09cf57f262e9fa84185f36b3b155bbfa99102b4c4ece"} Oct 11 03:06:48 crc kubenswrapper[4953]: I1011 03:06:48.602451 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" event={"ID":"63b0acf8-c59d-447e-9455-ed176ab5b5e6","Type":"ContainerStarted","Data":"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459"} Oct 11 03:06:48 crc kubenswrapper[4953]: I1011 03:06:48.603042 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:48 crc kubenswrapper[4953]: I1011 03:06:48.638489 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" podStartSLOduration=2.638460525 podStartE2EDuration="2.638460525s" podCreationTimestamp="2025-10-11 03:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:06:48.626857657 +0000 UTC m=+1219.559945361" watchObservedRunningTime="2025-10-11 03:06:48.638460525 +0000 UTC m=+1219.571548199" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.453825 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.536293 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.537134 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="dnsmasq-dns" 
containerID="cri-o://8112d8d716cd9e77403b5ff79d3b5ba8594714f7b5deb0ba01ab97620a94721a" gracePeriod=10 Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.709261 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.714756 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.730016 4953 generic.go:334] "Generic (PLEG): container finished" podID="bee71ad6-4395-4369-9501-7303bdae777e" containerID="8112d8d716cd9e77403b5ff79d3b5ba8594714f7b5deb0ba01ab97620a94721a" exitCode=0 Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.730070 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" event={"ID":"bee71ad6-4395-4369-9501-7303bdae777e","Type":"ContainerDied","Data":"8112d8d716cd9e77403b5ff79d3b5ba8594714f7b5deb0ba01ab97620a94721a"} Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.735215 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.828140 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.828189 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.828496 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8kpb\" (UniqueName: \"kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.828792 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.828933 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.829061 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: 
\"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.930462 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.931037 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.931064 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.931145 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8kpb\" (UniqueName: \"kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.931177 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.931255 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.932395 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.933051 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.933470 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" 
Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.933587 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.940053 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:56 crc kubenswrapper[4953]: I1011 03:06:56.965653 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8kpb\" (UniqueName: \"kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb\") pod \"dnsmasq-dns-864d5fc68c-n86p9\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.048869 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.186555 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.357012 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ll4bt\" (UniqueName: \"kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt\") pod \"bee71ad6-4395-4369-9501-7303bdae777e\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.357191 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb\") pod \"bee71ad6-4395-4369-9501-7303bdae777e\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.357271 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc\") pod \"bee71ad6-4395-4369-9501-7303bdae777e\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.357307 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config\") pod \"bee71ad6-4395-4369-9501-7303bdae777e\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.357416 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb\") pod \"bee71ad6-4395-4369-9501-7303bdae777e\" (UID: \"bee71ad6-4395-4369-9501-7303bdae777e\") " Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.410031 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt" (OuterVolumeSpecName: "kube-api-access-ll4bt") pod 
"bee71ad6-4395-4369-9501-7303bdae777e" (UID: "bee71ad6-4395-4369-9501-7303bdae777e"). InnerVolumeSpecName "kube-api-access-ll4bt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.454341 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bee71ad6-4395-4369-9501-7303bdae777e" (UID: "bee71ad6-4395-4369-9501-7303bdae777e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.461933 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.461976 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ll4bt\" (UniqueName: \"kubernetes.io/projected/bee71ad6-4395-4369-9501-7303bdae777e-kube-api-access-ll4bt\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.479067 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bee71ad6-4395-4369-9501-7303bdae777e" (UID: "bee71ad6-4395-4369-9501-7303bdae777e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.492723 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config" (OuterVolumeSpecName: "config") pod "bee71ad6-4395-4369-9501-7303bdae777e" (UID: "bee71ad6-4395-4369-9501-7303bdae777e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.496039 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bee71ad6-4395-4369-9501-7303bdae777e" (UID: "bee71ad6-4395-4369-9501-7303bdae777e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.563844 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.563885 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.563896 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bee71ad6-4395-4369-9501-7303bdae777e-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.625240 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.739661 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" event={"ID":"cad37403-5515-40a8-ab51-d2a40ffbff0f","Type":"ContainerStarted","Data":"1b5e76b4b7e8e632df8b786ae32ce02347bb174be8afaf92a01dfb7ac32feba1"} Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.741583 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" event={"ID":"bee71ad6-4395-4369-9501-7303bdae777e","Type":"ContainerDied","Data":"a6ae7c089aa9c61ab1c331cc5a066914e0e2d5a1bf236ea33fa611df24955bb3"} Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.741657 4953 scope.go:117] "RemoveContainer" containerID="8112d8d716cd9e77403b5ff79d3b5ba8594714f7b5deb0ba01ab97620a94721a" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.741794 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b856c5697-ld7pk" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.805566 4953 scope.go:117] "RemoveContainer" containerID="7a00521fc580f301d455574c19ef307653f0cd149265446d6e17fc6c3b07ef5c" Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.823373 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:06:57 crc kubenswrapper[4953]: I1011 03:06:57.838720 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b856c5697-ld7pk"] Oct 11 03:06:58 crc kubenswrapper[4953]: I1011 03:06:58.758639 4953 generic.go:334] "Generic (PLEG): container finished" podID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerID="a810d2768a3b27bf97fe3ddcedbba901c162e27e5e34a8e34744292bd2ed69cb" exitCode=0 Oct 11 03:06:58 crc kubenswrapper[4953]: I1011 03:06:58.758746 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" event={"ID":"cad37403-5515-40a8-ab51-d2a40ffbff0f","Type":"ContainerDied","Data":"a810d2768a3b27bf97fe3ddcedbba901c162e27e5e34a8e34744292bd2ed69cb"} Oct 11 03:06:59 crc kubenswrapper[4953]: I1011 03:06:59.778552 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" event={"ID":"cad37403-5515-40a8-ab51-d2a40ffbff0f","Type":"ContainerStarted","Data":"406880cb6e89e8a4a1b6ed10c31cfeac875b27262587b84f9f5798f8966ca5e1"} Oct 11 03:06:59 crc kubenswrapper[4953]: I1011 03:06:59.779063 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:06:59 crc kubenswrapper[4953]: I1011 03:06:59.819808 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" podStartSLOduration=3.8197767000000002 podStartE2EDuration="3.8197767s" podCreationTimestamp="2025-10-11 03:06:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:06:59.803811493 +0000 UTC m=+1230.736899227" watchObservedRunningTime="2025-10-11 03:06:59.8197767 +0000 UTC m=+1230.752864354" Oct 11 03:06:59 crc kubenswrapper[4953]: I1011 03:06:59.826267 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bee71ad6-4395-4369-9501-7303bdae777e" path="/var/lib/kubelet/pods/bee71ad6-4395-4369-9501-7303bdae777e/volumes" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.050891 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.158526 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.159318 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="dnsmasq-dns" containerID="cri-o://1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459" gracePeriod=10 Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.639490 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729258 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729390 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729461 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9xtv\" (UniqueName: \"kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729492 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729510 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.729557 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb\") pod \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\" (UID: \"63b0acf8-c59d-447e-9455-ed176ab5b5e6\") " Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.736361 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv" (OuterVolumeSpecName: "kube-api-access-d9xtv") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "kube-api-access-d9xtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.797421 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.802129 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.807525 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.822236 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.825509 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config" (OuterVolumeSpecName: "config") pod "63b0acf8-c59d-447e-9455-ed176ab5b5e6" (UID: "63b0acf8-c59d-447e-9455-ed176ab5b5e6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832029 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832062 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9xtv\" (UniqueName: \"kubernetes.io/projected/63b0acf8-c59d-447e-9455-ed176ab5b5e6-kube-api-access-d9xtv\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832074 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832086 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832095 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.832104 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63b0acf8-c59d-447e-9455-ed176ab5b5e6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.869158 4953 generic.go:334] "Generic (PLEG): container finished" podID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerID="1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459" exitCode=0 Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.869254 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.870931 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" event={"ID":"63b0acf8-c59d-447e-9455-ed176ab5b5e6","Type":"ContainerDied","Data":"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459"} Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.871025 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6447ccbd8f-fn5nn" event={"ID":"63b0acf8-c59d-447e-9455-ed176ab5b5e6","Type":"ContainerDied","Data":"5fb9f68929cf9b23ff1e09cf57f262e9fa84185f36b3b155bbfa99102b4c4ece"} Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.871099 4953 scope.go:117] "RemoveContainer" containerID="1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.899313 4953 scope.go:117] "RemoveContainer" containerID="378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.903351 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.912248 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6447ccbd8f-fn5nn"] Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.949937 4953 scope.go:117] "RemoveContainer" containerID="1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459" Oct 11 03:07:07 crc kubenswrapper[4953]: E1011 03:07:07.950594 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459\": container with ID starting with 1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459 not found: ID does not exist" containerID="1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.950646 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459"} err="failed to get container status \"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459\": rpc error: code = NotFound desc = could not find container \"1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459\": container with ID starting with 1589b4252187ab30913fd7ca3c287be8a51ccc1490534fc3f6b8240a4c43e459 not found: ID does not exist" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.950703 4953 scope.go:117] "RemoveContainer" containerID="378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937" Oct 11 03:07:07 crc kubenswrapper[4953]: E1011 03:07:07.950986 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937\": container with ID starting with 378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937 not found: ID does not exist" containerID="378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937" Oct 11 03:07:07 crc kubenswrapper[4953]: I1011 03:07:07.951095 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937"} err="failed to get container status 
\"378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937\": rpc error: code = NotFound desc = could not find container \"378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937\": container with ID starting with 378438692aa9ebd4ea01c0035bc8cfdb443f1a5804900e7ffbfd4e9baccc9937 not found: ID does not exist" Oct 11 03:07:09 crc kubenswrapper[4953]: I1011 03:07:09.816787 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" path="/var/lib/kubelet/pods/63b0acf8-c59d-447e-9455-ed176ab5b5e6/volumes" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.198241 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x"] Oct 11 03:07:17 crc kubenswrapper[4953]: E1011 03:07:17.199534 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="init" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.199568 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="init" Oct 11 03:07:17 crc kubenswrapper[4953]: E1011 03:07:17.199593 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.199667 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: E1011 03:07:17.199713 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.199736 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: E1011 03:07:17.199769 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="init" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.199791 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="init" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.200150 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="bee71ad6-4395-4369-9501-7303bdae777e" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.200177 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="63b0acf8-c59d-447e-9455-ed176ab5b5e6" containerName="dnsmasq-dns" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.201220 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.205152 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.205543 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.205951 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.206257 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.207673 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x"] Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.350973 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.358180 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-748z8\" (UniqueName: \"kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.358410 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.358520 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.460808 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.461015 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-748z8\" (UniqueName: \"kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.461171 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.461244 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.468318 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.468359 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.473108 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.494070 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-748z8\" (UniqueName: \"kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.574812 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.980966 4953 generic.go:334] "Generic (PLEG): container finished" podID="712b86c6-78ab-4ce9-96b7-4a627619f79b" containerID="056bb918f1bb2c7e74fac77cff313bf0d5aac908b1cc39496ff300ecb6891d59" exitCode=0 Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.981024 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"712b86c6-78ab-4ce9-96b7-4a627619f79b","Type":"ContainerDied","Data":"056bb918f1bb2c7e74fac77cff313bf0d5aac908b1cc39496ff300ecb6891d59"} Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.984249 4953 generic.go:334] "Generic (PLEG): container finished" podID="7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e" containerID="ca5ab8f3787a099375c712863b85af8568cd68e938995db8d66c67901e6e56f3" exitCode=0 Oct 11 03:07:17 crc kubenswrapper[4953]: I1011 03:07:17.984304 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e","Type":"ContainerDied","Data":"ca5ab8f3787a099375c712863b85af8568cd68e938995db8d66c67901e6e56f3"} Oct 11 03:07:18 crc kubenswrapper[4953]: I1011 03:07:18.196181 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x"] Oct 11 03:07:18 crc kubenswrapper[4953]: W1011 03:07:18.204037 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa7542dd_a07c_4d37_bba5_4b72a648d586.slice/crio-e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f WatchSource:0}: Error finding container e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f: Status 404 returned error can't find the container with id e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f Oct 11 03:07:18 crc kubenswrapper[4953]: I1011 03:07:18.208819 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:07:18 crc kubenswrapper[4953]: I1011 03:07:18.995318 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" event={"ID":"aa7542dd-a07c-4d37-bba5-4b72a648d586","Type":"ContainerStarted","Data":"e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f"} Oct 11 03:07:18 crc kubenswrapper[4953]: I1011 03:07:18.998712 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e","Type":"ContainerStarted","Data":"16d1eead591ac1e93057fe2365864ddf5875e184167bb1467d4eccc9bd063a4d"} Oct 11 03:07:18 crc kubenswrapper[4953]: I1011 03:07:18.999113 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:07:19 crc kubenswrapper[4953]: I1011 03:07:19.002948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"712b86c6-78ab-4ce9-96b7-4a627619f79b","Type":"ContainerStarted","Data":"b0228e942aa6ab1bfe2bd56ee6fe876bf3ebba00e218ea490c3b8409becfbf7e"} Oct 11 03:07:19 crc kubenswrapper[4953]: I1011 03:07:19.003167 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 11 03:07:19 crc kubenswrapper[4953]: I1011 03:07:19.026344 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.026317199 podStartE2EDuration="37.026317199s" podCreationTimestamp="2025-10-11 03:06:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:07:19.017664434 +0000 UTC m=+1249.950752088" watchObservedRunningTime="2025-10-11 03:07:19.026317199 +0000 UTC m=+1249.959404863" Oct 11 03:07:19 crc kubenswrapper[4953]: I1011 03:07:19.054903 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.054881168 podStartE2EDuration="37.054881168s" podCreationTimestamp="2025-10-11 03:06:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:07:19.053921724 +0000 UTC m=+1249.987009368" watchObservedRunningTime="2025-10-11 03:07:19.054881168 +0000 UTC m=+1249.987968812" Oct 11 03:07:28 crc kubenswrapper[4953]: I1011 03:07:28.093683 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" event={"ID":"aa7542dd-a07c-4d37-bba5-4b72a648d586","Type":"ContainerStarted","Data":"d62492716b41f187b722cbfb7ad010e8581881175b6634238f35a5d33a4127ab"} Oct 11 03:07:28 crc kubenswrapper[4953]: I1011 03:07:28.110492 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" podStartSLOduration=2.418813544 podStartE2EDuration="11.110473054s" podCreationTimestamp="2025-10-11 03:07:17 +0000 UTC" firstStartedPulling="2025-10-11 03:07:18.20836789 +0000 UTC m=+1249.141455534" lastFinishedPulling="2025-10-11 03:07:26.9000274 +0000 UTC m=+1257.833115044" observedRunningTime="2025-10-11 03:07:28.108321671 +0000 UTC m=+1259.041409325" watchObservedRunningTime="2025-10-11 03:07:28.110473054 +0000 UTC m=+1259.043560688" Oct 11 03:07:32 crc kubenswrapper[4953]: I1011 03:07:32.975881 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 11 03:07:32 crc kubenswrapper[4953]: I1011 03:07:32.995857 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 11 03:07:39 crc kubenswrapper[4953]: I1011 03:07:39.201735 4953 generic.go:334] "Generic (PLEG): container finished" podID="aa7542dd-a07c-4d37-bba5-4b72a648d586" containerID="d62492716b41f187b722cbfb7ad010e8581881175b6634238f35a5d33a4127ab" exitCode=0 Oct 11 03:07:39 crc kubenswrapper[4953]: I1011 03:07:39.201870 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" event={"ID":"aa7542dd-a07c-4d37-bba5-4b72a648d586","Type":"ContainerDied","Data":"d62492716b41f187b722cbfb7ad010e8581881175b6634238f35a5d33a4127ab"} Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.649562 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.838844 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-748z8\" (UniqueName: \"kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8\") pod \"aa7542dd-a07c-4d37-bba5-4b72a648d586\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.839316 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory\") pod \"aa7542dd-a07c-4d37-bba5-4b72a648d586\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.839429 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key\") pod \"aa7542dd-a07c-4d37-bba5-4b72a648d586\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.839617 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle\") pod \"aa7542dd-a07c-4d37-bba5-4b72a648d586\" (UID: \"aa7542dd-a07c-4d37-bba5-4b72a648d586\") " Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.845871 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "aa7542dd-a07c-4d37-bba5-4b72a648d586" (UID: "aa7542dd-a07c-4d37-bba5-4b72a648d586"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.845973 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8" (OuterVolumeSpecName: "kube-api-access-748z8") pod "aa7542dd-a07c-4d37-bba5-4b72a648d586" (UID: "aa7542dd-a07c-4d37-bba5-4b72a648d586"). InnerVolumeSpecName "kube-api-access-748z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.868465 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aa7542dd-a07c-4d37-bba5-4b72a648d586" (UID: "aa7542dd-a07c-4d37-bba5-4b72a648d586"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.872373 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory" (OuterVolumeSpecName: "inventory") pod "aa7542dd-a07c-4d37-bba5-4b72a648d586" (UID: "aa7542dd-a07c-4d37-bba5-4b72a648d586"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.942052 4953 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.942083 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-748z8\" (UniqueName: \"kubernetes.io/projected/aa7542dd-a07c-4d37-bba5-4b72a648d586-kube-api-access-748z8\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.942093 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:40 crc kubenswrapper[4953]: I1011 03:07:40.942101 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aa7542dd-a07c-4d37-bba5-4b72a648d586-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.233750 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" event={"ID":"aa7542dd-a07c-4d37-bba5-4b72a648d586","Type":"ContainerDied","Data":"e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f"} Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.233817 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5df351a3bba1416598527cdd29cf7708b3a674c207bf7af915bfc5d0c76325f" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.233862 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.334211 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56"] Oct 11 03:07:41 crc kubenswrapper[4953]: E1011 03:07:41.334660 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7542dd-a07c-4d37-bba5-4b72a648d586" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.334680 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7542dd-a07c-4d37-bba5-4b72a648d586" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.334851 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7542dd-a07c-4d37-bba5-4b72a648d586" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.335668 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.338205 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.338447 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.338803 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.338962 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.368997 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56"] Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.457837 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.458058 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.458242 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.458349 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bhm5\" (UniqueName: \"kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.561119 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.561209 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bhm5\" (UniqueName: \"kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.561370 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.561508 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.565647 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.567512 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.568411 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.587202 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bhm5\" (UniqueName: \"kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:41 crc kubenswrapper[4953]: I1011 03:07:41.658919 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:07:42 crc kubenswrapper[4953]: I1011 03:07:42.185352 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56"] Oct 11 03:07:42 crc kubenswrapper[4953]: W1011 03:07:42.190384 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5d756a9_af2d_483a_9f6b_97974302220a.slice/crio-3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f WatchSource:0}: Error finding container 3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f: Status 404 returned error can't find the container with id 3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f Oct 11 03:07:42 crc kubenswrapper[4953]: I1011 03:07:42.241878 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" event={"ID":"b5d756a9-af2d-483a-9f6b-97974302220a","Type":"ContainerStarted","Data":"3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f"} Oct 11 03:07:43 crc kubenswrapper[4953]: I1011 03:07:43.252895 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" event={"ID":"b5d756a9-af2d-483a-9f6b-97974302220a","Type":"ContainerStarted","Data":"be70a08430be37c69d86203e14670da797433ca61110a5b71e06d8c57c9961a9"} Oct 11 03:07:43 crc kubenswrapper[4953]: I1011 03:07:43.280989 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" podStartSLOduration=1.7717485929999999 podStartE2EDuration="2.280965396s" podCreationTimestamp="2025-10-11 03:07:41 +0000 UTC" firstStartedPulling="2025-10-11 03:07:42.193197058 +0000 UTC m=+1273.126284742" lastFinishedPulling="2025-10-11 03:07:42.702413891 +0000 UTC m=+1273.635501545" observedRunningTime="2025-10-11 03:07:43.273641824 +0000 UTC m=+1274.206729508" watchObservedRunningTime="2025-10-11 03:07:43.280965396 +0000 UTC m=+1274.214053040" Oct 11 03:08:11 crc kubenswrapper[4953]: I1011 03:08:11.316564 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:08:11 crc kubenswrapper[4953]: I1011 03:08:11.317583 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:08:41 crc kubenswrapper[4953]: I1011 03:08:41.317154 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:08:41 crc kubenswrapper[4953]: I1011 03:08:41.318055 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:08:46 crc kubenswrapper[4953]: I1011 03:08:46.431223 4953 scope.go:117] "RemoveContainer" containerID="f8e175cff7befb9ea2cd0bab4a0a73abbfddc321e48177ee6ab9373439368529" Oct 11 03:08:46 crc kubenswrapper[4953]: I1011 03:08:46.471762 4953 scope.go:117] "RemoveContainer" containerID="d81deedc7cc8170421f7b175d6b1b18ca838616b25e7c1525e3135db5803f57a" Oct 11 03:09:11 crc kubenswrapper[4953]: I1011 03:09:11.318195 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:09:11 crc kubenswrapper[4953]: I1011 03:09:11.319033 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:09:11 crc kubenswrapper[4953]: I1011 03:09:11.319104 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:09:11 crc kubenswrapper[4953]: I1011 03:09:11.320316 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:09:11 crc kubenswrapper[4953]: I1011 03:09:11.320420 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce" gracePeriod=600 Oct 11 03:09:12 crc kubenswrapper[4953]: I1011 03:09:12.249873 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce" exitCode=0 Oct 11 03:09:12 crc kubenswrapper[4953]: I1011 03:09:12.249936 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce"} Oct 11 03:09:12 crc kubenswrapper[4953]: I1011 03:09:12.250577 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"} Oct 11 03:09:12 crc kubenswrapper[4953]: I1011 03:09:12.250614 4953 scope.go:117] "RemoveContainer" containerID="fb74160abbfed52859a5196152b1c675ce92c2f54edf843aaeabd1e6eeb4622a" Oct 11 03:09:27 crc kubenswrapper[4953]: I1011 03:09:27.861813 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:27 crc 
kubenswrapper[4953]: I1011 03:09:27.864290 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:27 crc kubenswrapper[4953]: I1011 03:09:27.878063 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:27 crc kubenswrapper[4953]: I1011 03:09:27.941022 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:27 crc kubenswrapper[4953]: I1011 03:09:27.941084 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw25b\" (UniqueName: \"kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:27 crc kubenswrapper[4953]: I1011 03:09:27.941188 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.042900 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.043012 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.043073 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw25b\" (UniqueName: \"kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.043511 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.043534 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 
11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.066675 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw25b\" (UniqueName: \"kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b\") pod \"community-operators-4gw95\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.183796 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:28 crc kubenswrapper[4953]: I1011 03:09:28.690497 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:29 crc kubenswrapper[4953]: I1011 03:09:29.437248 4953 generic.go:334] "Generic (PLEG): container finished" podID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerID="ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819" exitCode=0 Oct 11 03:09:29 crc kubenswrapper[4953]: I1011 03:09:29.437315 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerDied","Data":"ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819"} Oct 11 03:09:29 crc kubenswrapper[4953]: I1011 03:09:29.438830 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerStarted","Data":"88af3e35e99ca7896010ffd22dd04642fa86abd02565b7d7706850bbce8794d3"} Oct 11 03:09:31 crc kubenswrapper[4953]: I1011 03:09:31.460211 4953 generic.go:334] "Generic (PLEG): container finished" podID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerID="7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960" exitCode=0 Oct 11 03:09:31 crc kubenswrapper[4953]: I1011 03:09:31.460280 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerDied","Data":"7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960"} Oct 11 03:09:32 crc kubenswrapper[4953]: I1011 03:09:32.471088 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerStarted","Data":"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd"} Oct 11 03:09:32 crc kubenswrapper[4953]: I1011 03:09:32.488031 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4gw95" podStartSLOduration=3.046130772 podStartE2EDuration="5.488013279s" podCreationTimestamp="2025-10-11 03:09:27 +0000 UTC" firstStartedPulling="2025-10-11 03:09:29.440779553 +0000 UTC m=+1380.373867227" lastFinishedPulling="2025-10-11 03:09:31.88266208 +0000 UTC m=+1382.815749734" observedRunningTime="2025-10-11 03:09:32.487032315 +0000 UTC m=+1383.420119959" watchObservedRunningTime="2025-10-11 03:09:32.488013279 +0000 UTC m=+1383.421100923" Oct 11 03:09:38 crc kubenswrapper[4953]: I1011 03:09:38.184205 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:38 crc kubenswrapper[4953]: I1011 03:09:38.184717 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:38 crc kubenswrapper[4953]: I1011 03:09:38.246318 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:38 crc kubenswrapper[4953]: I1011 03:09:38.591165 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:38 crc kubenswrapper[4953]: I1011 03:09:38.654067 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:40 crc kubenswrapper[4953]: I1011 03:09:40.568219 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4gw95" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="registry-server" containerID="cri-o://eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd" gracePeriod=2 Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.036710 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.104912 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content\") pod \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.105218 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw25b\" (UniqueName: \"kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b\") pod \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.105268 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities\") pod \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\" (UID: \"b8c45a64-5876-4411-ba2f-05bcae44a3ea\") " Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.106319 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities" (OuterVolumeSpecName: "utilities") pod "b8c45a64-5876-4411-ba2f-05bcae44a3ea" (UID: "b8c45a64-5876-4411-ba2f-05bcae44a3ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.113945 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b" (OuterVolumeSpecName: "kube-api-access-bw25b") pod "b8c45a64-5876-4411-ba2f-05bcae44a3ea" (UID: "b8c45a64-5876-4411-ba2f-05bcae44a3ea"). InnerVolumeSpecName "kube-api-access-bw25b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.168294 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8c45a64-5876-4411-ba2f-05bcae44a3ea" (UID: "b8c45a64-5876-4411-ba2f-05bcae44a3ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.207881 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw25b\" (UniqueName: \"kubernetes.io/projected/b8c45a64-5876-4411-ba2f-05bcae44a3ea-kube-api-access-bw25b\") on node \"crc\" DevicePath \"\"" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.207914 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.207926 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8c45a64-5876-4411-ba2f-05bcae44a3ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.580704 4953 generic.go:334] "Generic (PLEG): container finished" podID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerID="eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd" exitCode=0 Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.580770 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerDied","Data":"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd"} Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.580808 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4gw95" event={"ID":"b8c45a64-5876-4411-ba2f-05bcae44a3ea","Type":"ContainerDied","Data":"88af3e35e99ca7896010ffd22dd04642fa86abd02565b7d7706850bbce8794d3"} Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.580839 4953 scope.go:117] "RemoveContainer" containerID="eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.581044 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4gw95" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.621850 4953 scope.go:117] "RemoveContainer" containerID="7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.626156 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.636053 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4gw95"] Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.645884 4953 scope.go:117] "RemoveContainer" containerID="ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.688692 4953 scope.go:117] "RemoveContainer" containerID="eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd" Oct 11 03:09:41 crc kubenswrapper[4953]: E1011 03:09:41.696245 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd\": container with ID starting with eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd not found: ID does not exist" containerID="eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.696361 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd"} err="failed to get container status \"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd\": rpc error: code = NotFound desc = could not find container \"eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd\": container with ID starting with eee2af55c3b5b337907d6f7328748bc28d15f5973bd5c0322f62401d86a4e6dd not found: ID does not exist" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.696420 4953 scope.go:117] "RemoveContainer" containerID="7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960" Oct 11 03:09:41 crc kubenswrapper[4953]: E1011 03:09:41.696902 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960\": container with ID starting with 7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960 not found: ID does not exist" containerID="7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.696940 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960"} err="failed to get container status \"7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960\": rpc error: code = NotFound desc = could not find container \"7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960\": container with ID starting with 7836efa74dc3d55470f00002d78a441cfaa955c7312263b056cf119fb2e1d960 not found: ID does not exist" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.696961 4953 scope.go:117] "RemoveContainer" containerID="ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819" Oct 11 03:09:41 crc kubenswrapper[4953]: E1011 03:09:41.697294 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819\": container with ID starting with ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819 not found: ID does not exist" containerID="ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.697323 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819"} err="failed to get container status \"ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819\": rpc error: code = NotFound desc = could not find container \"ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819\": container with ID starting with ba363bf46200c335d0dc34d087691b6d79e4cd60532b8f85770bae5b86732819 not found: ID does not exist" Oct 11 03:09:41 crc kubenswrapper[4953]: I1011 03:09:41.807040 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" path="/var/lib/kubelet/pods/b8c45a64-5876-4411-ba2f-05bcae44a3ea/volumes" Oct 11 03:09:46 crc kubenswrapper[4953]: I1011 03:09:46.586963 4953 scope.go:117] "RemoveContainer" containerID="4bdc404244e8c8339f5071768e3a229ae3c2d3317b71e7fc0e3327d0061724bd" Oct 11 03:09:46 crc kubenswrapper[4953]: I1011 03:09:46.619066 4953 scope.go:117] "RemoveContainer" containerID="d0be1edac47fcdf9c60fce64ee411e7e027691a247156c9796c49933c73ca8c0" Oct 11 03:09:46 crc kubenswrapper[4953]: I1011 03:09:46.722685 4953 scope.go:117] "RemoveContainer" containerID="3e668ce6ea362a40526cc9203aaac5b526d3ae2e0859f26e44648fcadc79cf5c" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.161382 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:32 crc kubenswrapper[4953]: E1011 03:10:32.162339 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="registry-server" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.162355 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="registry-server" Oct 11 03:10:32 crc kubenswrapper[4953]: E1011 03:10:32.162380 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="extract-content" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.162387 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="extract-content" Oct 11 03:10:32 crc kubenswrapper[4953]: E1011 03:10:32.162409 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="extract-utilities" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.162417 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="extract-utilities" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.162666 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8c45a64-5876-4411-ba2f-05bcae44a3ea" containerName="registry-server" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.164638 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.180965 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.263257 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9c6h\" (UniqueName: \"kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.263466 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.263551 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.366061 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.366193 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9c6h\" (UniqueName: \"kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.366291 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.366997 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.367004 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.393405 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h9c6h\" (UniqueName: \"kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h\") pod \"certified-operators-c8sgr\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.485695 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:32 crc kubenswrapper[4953]: I1011 03:10:32.983167 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:33 crc kubenswrapper[4953]: I1011 03:10:33.201783 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerStarted","Data":"ef86887f9956b11998c0f92c78e1a5b4f4dc4a551fcf5ee76cd1001312b59b98"} Oct 11 03:10:34 crc kubenswrapper[4953]: I1011 03:10:34.217326 4953 generic.go:334] "Generic (PLEG): container finished" podID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerID="5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe" exitCode=0 Oct 11 03:10:34 crc kubenswrapper[4953]: I1011 03:10:34.217428 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerDied","Data":"5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe"} Oct 11 03:10:35 crc kubenswrapper[4953]: I1011 03:10:35.229393 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerStarted","Data":"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124"} Oct 11 03:10:36 crc kubenswrapper[4953]: I1011 03:10:36.243461 4953 generic.go:334] "Generic (PLEG): container finished" podID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerID="b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124" exitCode=0 Oct 11 03:10:36 crc kubenswrapper[4953]: I1011 03:10:36.243532 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerDied","Data":"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124"} Oct 11 03:10:37 crc kubenswrapper[4953]: I1011 03:10:37.261337 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerStarted","Data":"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed"} Oct 11 03:10:37 crc kubenswrapper[4953]: I1011 03:10:37.290376 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c8sgr" podStartSLOduration=2.89567284 podStartE2EDuration="5.290348742s" podCreationTimestamp="2025-10-11 03:10:32 +0000 UTC" firstStartedPulling="2025-10-11 03:10:34.220315144 +0000 UTC m=+1445.153402828" lastFinishedPulling="2025-10-11 03:10:36.614991086 +0000 UTC m=+1447.548078730" observedRunningTime="2025-10-11 03:10:37.28470089 +0000 UTC m=+1448.217788544" watchObservedRunningTime="2025-10-11 03:10:37.290348742 +0000 UTC m=+1448.223436426" Oct 11 03:10:42 crc kubenswrapper[4953]: I1011 03:10:42.486107 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:42 crc kubenswrapper[4953]: I1011 03:10:42.488367 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:42 crc kubenswrapper[4953]: I1011 03:10:42.535810 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:43 crc kubenswrapper[4953]: I1011 03:10:43.378315 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:43 crc kubenswrapper[4953]: I1011 03:10:43.434786 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.340275 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c8sgr" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="registry-server" containerID="cri-o://50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed" gracePeriod=2 Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.821316 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.919628 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content\") pod \"e935ba9d-6fbe-48d9-a1b1-258f02649330\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.919741 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities\") pod \"e935ba9d-6fbe-48d9-a1b1-258f02649330\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.919792 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9c6h\" (UniqueName: \"kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h\") pod \"e935ba9d-6fbe-48d9-a1b1-258f02649330\" (UID: \"e935ba9d-6fbe-48d9-a1b1-258f02649330\") " Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.921797 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities" (OuterVolumeSpecName: "utilities") pod "e935ba9d-6fbe-48d9-a1b1-258f02649330" (UID: "e935ba9d-6fbe-48d9-a1b1-258f02649330"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.926527 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h" (OuterVolumeSpecName: "kube-api-access-h9c6h") pod "e935ba9d-6fbe-48d9-a1b1-258f02649330" (UID: "e935ba9d-6fbe-48d9-a1b1-258f02649330"). InnerVolumeSpecName "kube-api-access-h9c6h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:10:45 crc kubenswrapper[4953]: I1011 03:10:45.970205 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e935ba9d-6fbe-48d9-a1b1-258f02649330" (UID: "e935ba9d-6fbe-48d9-a1b1-258f02649330"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.022156 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.022189 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e935ba9d-6fbe-48d9-a1b1-258f02649330-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.022199 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9c6h\" (UniqueName: \"kubernetes.io/projected/e935ba9d-6fbe-48d9-a1b1-258f02649330-kube-api-access-h9c6h\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.351692 4953 generic.go:334] "Generic (PLEG): container finished" podID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerID="50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed" exitCode=0 Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.351739 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c8sgr" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.351761 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerDied","Data":"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed"} Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.352083 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c8sgr" event={"ID":"e935ba9d-6fbe-48d9-a1b1-258f02649330","Type":"ContainerDied","Data":"ef86887f9956b11998c0f92c78e1a5b4f4dc4a551fcf5ee76cd1001312b59b98"} Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.352102 4953 scope.go:117] "RemoveContainer" containerID="50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.372864 4953 scope.go:117] "RemoveContainer" containerID="b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.384756 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.391117 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c8sgr"] Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.420701 4953 scope.go:117] "RemoveContainer" containerID="5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.461137 4953 scope.go:117] "RemoveContainer" containerID="50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed" Oct 11 03:10:46 crc kubenswrapper[4953]: E1011 03:10:46.461964 4953 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed\": container with ID starting with 50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed not found: ID does not exist" containerID="50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.462068 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed"} err="failed to get container status \"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed\": rpc error: code = NotFound desc = could not find container \"50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed\": container with ID starting with 50b43548435147eb4256dd29731ad257d1254e01f1d57820429e6935042b04ed not found: ID does not exist" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.462146 4953 scope.go:117] "RemoveContainer" containerID="b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124" Oct 11 03:10:46 crc kubenswrapper[4953]: E1011 03:10:46.462772 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124\": container with ID starting with b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124 not found: ID does not exist" containerID="b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.462821 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124"} err="failed to get container status \"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124\": rpc error: code = NotFound desc = could not find container \"b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124\": container with ID starting with b84f11ec446c1fa6884d82112d5b161d6a641972fc12330585e2c5e3e7cb0124 not found: ID does not exist" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.462855 4953 scope.go:117] "RemoveContainer" containerID="5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe" Oct 11 03:10:46 crc kubenswrapper[4953]: E1011 03:10:46.463193 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe\": container with ID starting with 5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe not found: ID does not exist" containerID="5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe" Oct 11 03:10:46 crc kubenswrapper[4953]: I1011 03:10:46.463285 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe"} err="failed to get container status \"5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe\": rpc error: code = NotFound desc = could not find container \"5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe\": container with ID starting with 5d41e5af18859c167dfc8770c1f07d506586b23299751f423c5fefcd4bba62fe not found: ID does not exist" Oct 11 03:10:47 crc kubenswrapper[4953]: I1011 03:10:47.806093 4953 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" path="/var/lib/kubelet/pods/e935ba9d-6fbe-48d9-a1b1-258f02649330/volumes" Oct 11 03:10:56 crc kubenswrapper[4953]: I1011 03:10:56.466147 4953 generic.go:334] "Generic (PLEG): container finished" podID="b5d756a9-af2d-483a-9f6b-97974302220a" containerID="be70a08430be37c69d86203e14670da797433ca61110a5b71e06d8c57c9961a9" exitCode=0 Oct 11 03:10:56 crc kubenswrapper[4953]: I1011 03:10:56.466265 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" event={"ID":"b5d756a9-af2d-483a-9f6b-97974302220a","Type":"ContainerDied","Data":"be70a08430be37c69d86203e14670da797433ca61110a5b71e06d8c57c9961a9"} Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.040191 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.158896 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle\") pod \"b5d756a9-af2d-483a-9f6b-97974302220a\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.158966 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bhm5\" (UniqueName: \"kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5\") pod \"b5d756a9-af2d-483a-9f6b-97974302220a\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.159026 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key\") pod \"b5d756a9-af2d-483a-9f6b-97974302220a\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.159107 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory\") pod \"b5d756a9-af2d-483a-9f6b-97974302220a\" (UID: \"b5d756a9-af2d-483a-9f6b-97974302220a\") " Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.164528 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b5d756a9-af2d-483a-9f6b-97974302220a" (UID: "b5d756a9-af2d-483a-9f6b-97974302220a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.165252 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5" (OuterVolumeSpecName: "kube-api-access-8bhm5") pod "b5d756a9-af2d-483a-9f6b-97974302220a" (UID: "b5d756a9-af2d-483a-9f6b-97974302220a"). InnerVolumeSpecName "kube-api-access-8bhm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.185191 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory" (OuterVolumeSpecName: "inventory") pod "b5d756a9-af2d-483a-9f6b-97974302220a" (UID: "b5d756a9-af2d-483a-9f6b-97974302220a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.185498 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b5d756a9-af2d-483a-9f6b-97974302220a" (UID: "b5d756a9-af2d-483a-9f6b-97974302220a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.260614 4953 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.260645 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bhm5\" (UniqueName: \"kubernetes.io/projected/b5d756a9-af2d-483a-9f6b-97974302220a-kube-api-access-8bhm5\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.260654 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.260666 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5d756a9-af2d-483a-9f6b-97974302220a-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.489187 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" event={"ID":"b5d756a9-af2d-483a-9f6b-97974302220a","Type":"ContainerDied","Data":"3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f"} Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.489234 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3672b51850d460876628066535edd34fab051188520ad8cf84d4cf80a4f5fc1f" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.489267 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.589833 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64"] Oct 11 03:10:58 crc kubenswrapper[4953]: E1011 03:10:58.590851 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="extract-content" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.590872 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="extract-content" Oct 11 03:10:58 crc kubenswrapper[4953]: E1011 03:10:58.590897 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="extract-utilities" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.590905 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="extract-utilities" Oct 11 03:10:58 crc kubenswrapper[4953]: E1011 03:10:58.590925 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5d756a9-af2d-483a-9f6b-97974302220a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.590931 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5d756a9-af2d-483a-9f6b-97974302220a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 03:10:58 crc kubenswrapper[4953]: E1011 03:10:58.590964 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="registry-server" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.590971 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="registry-server" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.591441 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5d756a9-af2d-483a-9f6b-97974302220a" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.591496 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e935ba9d-6fbe-48d9-a1b1-258f02649330" containerName="registry-server" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.597495 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.601260 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.601534 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.601968 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.602701 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.610803 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64"] Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.669734 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.669882 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xszw\" (UniqueName: \"kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.670075 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.771632 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.771687 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xszw\" (UniqueName: \"kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.771729 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.780236 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.785172 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.789927 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xszw\" (UniqueName: \"kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pwb64\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:58 crc kubenswrapper[4953]: I1011 03:10:58.938578 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:10:59 crc kubenswrapper[4953]: I1011 03:10:59.482078 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64"] Oct 11 03:11:00 crc kubenswrapper[4953]: I1011 03:11:00.507147 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" event={"ID":"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6","Type":"ContainerStarted","Data":"c70d04fc4d6012d439ef661c72ac8aaf720d7beddb3451f1a50d15565e7942b4"} Oct 11 03:11:00 crc kubenswrapper[4953]: I1011 03:11:00.507428 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" event={"ID":"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6","Type":"ContainerStarted","Data":"7765b2426024fb5dff9345a928fcec2f33f629138806cf80fa65798f5505faa1"} Oct 11 03:11:00 crc kubenswrapper[4953]: I1011 03:11:00.531728 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" podStartSLOduration=2.032255216 podStartE2EDuration="2.531712188s" podCreationTimestamp="2025-10-11 03:10:58 +0000 UTC" firstStartedPulling="2025-10-11 03:10:59.494889589 +0000 UTC m=+1470.427977253" lastFinishedPulling="2025-10-11 03:10:59.994346581 +0000 UTC m=+1470.927434225" observedRunningTime="2025-10-11 03:11:00.528337753 +0000 UTC m=+1471.461425417" watchObservedRunningTime="2025-10-11 03:11:00.531712188 +0000 UTC m=+1471.464799832" Oct 11 03:11:11 crc kubenswrapper[4953]: I1011 03:11:11.316892 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:11:11 crc kubenswrapper[4953]: I1011 03:11:11.317750 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:11:41 crc kubenswrapper[4953]: I1011 03:11:41.317048 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:11:41 crc kubenswrapper[4953]: I1011 03:11:41.317906 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:11:46 crc kubenswrapper[4953]: I1011 03:11:46.848832 4953 scope.go:117] "RemoveContainer" containerID="1372b2f441e452f1544cd8ee7439fef8aa34915e9b6e05163146195fbba20993" Oct 11 03:11:46 crc kubenswrapper[4953]: I1011 03:11:46.888209 4953 scope.go:117] "RemoveContainer" containerID="d880235da6c9dd4ba0f1516b7e5579d2230bc415a5f17c7b398275b343f9d70b" Oct 11 03:12:11 crc kubenswrapper[4953]: I1011 03:12:11.317099 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:12:11 crc kubenswrapper[4953]: I1011 03:12:11.317699 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:12:11 crc kubenswrapper[4953]: I1011 03:12:11.317751 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:12:11 crc kubenswrapper[4953]: I1011 03:12:11.318560 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:12:11 crc kubenswrapper[4953]: I1011 03:12:11.318652 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" gracePeriod=600 Oct 11 03:12:11 crc kubenswrapper[4953]: E1011 03:12:11.456469 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.050896 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-cb9dw"] Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.061559 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-swwlm"] Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.073942 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-swwlm"] Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.083125 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-cb9dw"] Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.291391 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" exitCode=0 Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.291461 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"} Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.291518 4953 scope.go:117] "RemoveContainer" containerID="2f93ff398f2fda2d5067e3ee2a670721116ec74b79a52f1ac00d56dd311dccce" Oct 11 03:12:12 crc kubenswrapper[4953]: I1011 03:12:12.292388 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:12:12 crc kubenswrapper[4953]: E1011 03:12:12.293006 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:12:13 crc kubenswrapper[4953]: I1011 03:12:13.812889 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c1b94b5-2f83-482e-a795-1e7b307f168e" path="/var/lib/kubelet/pods/9c1b94b5-2f83-482e-a795-1e7b307f168e/volumes" Oct 11 03:12:13 crc kubenswrapper[4953]: I1011 03:12:13.814726 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3131a25-a541-407f-911d-95c20d7f368a" path="/var/lib/kubelet/pods/f3131a25-a541-407f-911d-95c20d7f368a/volumes" Oct 11 03:12:16 crc kubenswrapper[4953]: I1011 03:12:16.344480 4953 generic.go:334] "Generic (PLEG): container finished" podID="7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" containerID="c70d04fc4d6012d439ef661c72ac8aaf720d7beddb3451f1a50d15565e7942b4" exitCode=0 Oct 11 03:12:16 crc kubenswrapper[4953]: I1011 03:12:16.344704 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" event={"ID":"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6","Type":"ContainerDied","Data":"c70d04fc4d6012d439ef661c72ac8aaf720d7beddb3451f1a50d15565e7942b4"} Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.040138 4953 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-zlg2z"] Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.053085 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-zlg2z"] Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.805789 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff743f4-1191-4f96-aa4d-3863163cd0a3" path="/var/lib/kubelet/pods/8ff743f4-1191-4f96-aa4d-3863163cd0a3/volumes" Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.832354 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.925726 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key\") pod \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.926057 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xszw\" (UniqueName: \"kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw\") pod \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.926111 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory\") pod \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\" (UID: \"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6\") " Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.933662 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw" (OuterVolumeSpecName: "kube-api-access-2xszw") pod "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" (UID: "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6"). InnerVolumeSpecName "kube-api-access-2xszw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.956126 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" (UID: "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:12:17 crc kubenswrapper[4953]: I1011 03:12:17.971164 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory" (OuterVolumeSpecName: "inventory") pod "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" (UID: "7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.028391 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.028441 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.028461 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xszw\" (UniqueName: \"kubernetes.io/projected/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6-kube-api-access-2xszw\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.371305 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" event={"ID":"7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6","Type":"ContainerDied","Data":"7765b2426024fb5dff9345a928fcec2f33f629138806cf80fa65798f5505faa1"} Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.371370 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7765b2426024fb5dff9345a928fcec2f33f629138806cf80fa65798f5505faa1" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.371446 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.484889 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9"] Oct 11 03:12:18 crc kubenswrapper[4953]: E1011 03:12:18.486023 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.486090 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.486590 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.488450 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.491503 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.491778 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.492161 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.492289 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.518467 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9"] Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.541677 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h6b8\" (UniqueName: \"kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.541977 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.542830 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.645071 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.645163 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.645329 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h6b8\" (UniqueName: \"kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.650356 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.650727 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.661227 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h6b8\" (UniqueName: \"kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:18 crc kubenswrapper[4953]: I1011 03:12:18.820423 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:19 crc kubenswrapper[4953]: I1011 03:12:19.233387 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9"] Oct 11 03:12:19 crc kubenswrapper[4953]: I1011 03:12:19.237904 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:12:19 crc kubenswrapper[4953]: I1011 03:12:19.382211 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" event={"ID":"2598cf72-6bcd-4d4d-a34d-b157fab230fd","Type":"ContainerStarted","Data":"c1213eb7e4a7f7ddd384719de319feac1304adfbd7e976e70d938687448a008c"} Oct 11 03:12:20 crc kubenswrapper[4953]: I1011 03:12:20.401251 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" event={"ID":"2598cf72-6bcd-4d4d-a34d-b157fab230fd","Type":"ContainerStarted","Data":"51547b20134e6ad5d7fdc673e28d551a89377b8758d42b2fe5b30461863a501e"} Oct 11 03:12:21 crc kubenswrapper[4953]: I1011 03:12:21.046743 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" podStartSLOduration=2.518857383 podStartE2EDuration="3.046723181s" podCreationTimestamp="2025-10-11 03:12:18 +0000 UTC" firstStartedPulling="2025-10-11 03:12:19.237362016 +0000 UTC m=+1550.170449690" lastFinishedPulling="2025-10-11 03:12:19.765227824 +0000 UTC m=+1550.698315488" observedRunningTime="2025-10-11 03:12:20.427674515 +0000 UTC m=+1551.360762169" watchObservedRunningTime="2025-10-11 03:12:21.046723181 +0000 UTC m=+1551.979810825" Oct 11 03:12:21 crc kubenswrapper[4953]: I1011 03:12:21.053092 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/keystone-51a6-account-create-pt5kn"] Oct 11 03:12:21 crc kubenswrapper[4953]: I1011 03:12:21.060954 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-51a6-account-create-pt5kn"] Oct 11 03:12:21 crc kubenswrapper[4953]: I1011 03:12:21.809247 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa0f95a-70f4-4ab2-9f09-85506fb7160b" path="/var/lib/kubelet/pods/0fa0f95a-70f4-4ab2-9f09-85506fb7160b/volumes" Oct 11 03:12:22 crc kubenswrapper[4953]: I1011 03:12:22.038538 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-d9a5-account-create-cgdnn"] Oct 11 03:12:22 crc kubenswrapper[4953]: I1011 03:12:22.046922 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-d9a5-account-create-cgdnn"] Oct 11 03:12:23 crc kubenswrapper[4953]: I1011 03:12:23.816770 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6" path="/var/lib/kubelet/pods/2b4d1d54-cdb9-4d30-90e6-23d0ac44e1c6/volumes" Oct 11 03:12:24 crc kubenswrapper[4953]: I1011 03:12:24.796029 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:12:24 crc kubenswrapper[4953]: E1011 03:12:24.796589 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:12:25 crc kubenswrapper[4953]: I1011 03:12:25.450635 4953 generic.go:334] "Generic (PLEG): container finished" podID="2598cf72-6bcd-4d4d-a34d-b157fab230fd" containerID="51547b20134e6ad5d7fdc673e28d551a89377b8758d42b2fe5b30461863a501e" exitCode=0 Oct 11 03:12:25 crc kubenswrapper[4953]: I1011 03:12:25.450683 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" event={"ID":"2598cf72-6bcd-4d4d-a34d-b157fab230fd","Type":"ContainerDied","Data":"51547b20134e6ad5d7fdc673e28d551a89377b8758d42b2fe5b30461863a501e"} Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.860501 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.901076 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key\") pod \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.901143 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory\") pod \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.901192 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h6b8\" (UniqueName: \"kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8\") pod \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\" (UID: \"2598cf72-6bcd-4d4d-a34d-b157fab230fd\") " Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.906919 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8" (OuterVolumeSpecName: "kube-api-access-6h6b8") pod "2598cf72-6bcd-4d4d-a34d-b157fab230fd" (UID: "2598cf72-6bcd-4d4d-a34d-b157fab230fd"). InnerVolumeSpecName "kube-api-access-6h6b8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.927846 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory" (OuterVolumeSpecName: "inventory") pod "2598cf72-6bcd-4d4d-a34d-b157fab230fd" (UID: "2598cf72-6bcd-4d4d-a34d-b157fab230fd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:12:26 crc kubenswrapper[4953]: I1011 03:12:26.932855 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2598cf72-6bcd-4d4d-a34d-b157fab230fd" (UID: "2598cf72-6bcd-4d4d-a34d-b157fab230fd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.003698 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h6b8\" (UniqueName: \"kubernetes.io/projected/2598cf72-6bcd-4d4d-a34d-b157fab230fd-kube-api-access-6h6b8\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.003744 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.003757 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2598cf72-6bcd-4d4d-a34d-b157fab230fd-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.030543 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8905-account-create-lt7np"] Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.041363 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-8905-account-create-lt7np"] Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.473429 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" event={"ID":"2598cf72-6bcd-4d4d-a34d-b157fab230fd","Type":"ContainerDied","Data":"c1213eb7e4a7f7ddd384719de319feac1304adfbd7e976e70d938687448a008c"} Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.473475 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1213eb7e4a7f7ddd384719de319feac1304adfbd7e976e70d938687448a008c" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.473509 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.542413 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx"] Oct 11 03:12:27 crc kubenswrapper[4953]: E1011 03:12:27.543264 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2598cf72-6bcd-4d4d-a34d-b157fab230fd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.543286 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2598cf72-6bcd-4d4d-a34d-b157fab230fd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.543494 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2598cf72-6bcd-4d4d-a34d-b157fab230fd" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.544087 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.546238 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.546546 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.546777 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.546995 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.553179 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx"] Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.623141 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.623188 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shzdn\" (UniqueName: \"kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.623261 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.726062 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.727649 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.727735 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shzdn\" (UniqueName: \"kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: 
\"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.731056 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.731194 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.751333 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shzdn\" (UniqueName: \"kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-qtjzx\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.811385 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc95d58b-c39c-4459-8a45-daae75958c99" path="/var/lib/kubelet/pods/bc95d58b-c39c-4459-8a45-daae75958c99/volumes" Oct 11 03:12:27 crc kubenswrapper[4953]: I1011 03:12:27.861935 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:12:28 crc kubenswrapper[4953]: I1011 03:12:28.380853 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx"] Oct 11 03:12:28 crc kubenswrapper[4953]: I1011 03:12:28.484236 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" event={"ID":"53512175-e6aa-4d31-aa79-0d0c651a61dd","Type":"ContainerStarted","Data":"528e0300f4210331537e16d434798ed5aa864012844b89f35df55f375a713cf2"} Oct 11 03:12:29 crc kubenswrapper[4953]: I1011 03:12:29.493143 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" event={"ID":"53512175-e6aa-4d31-aa79-0d0c651a61dd","Type":"ContainerStarted","Data":"54c6f8773b2c35522c9e9e1b1cb22430d96834e0b846a429c53d3d7817d43036"} Oct 11 03:12:39 crc kubenswrapper[4953]: I1011 03:12:39.802354 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:12:39 crc kubenswrapper[4953]: E1011 03:12:39.803195 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:12:46 crc kubenswrapper[4953]: I1011 03:12:46.988058 4953 scope.go:117] "RemoveContainer" 
containerID="9960828f480b3dfcb61f958e4556aee0dffda17587c2e6456f930519ebf59620" Oct 11 03:12:47 crc kubenswrapper[4953]: I1011 03:12:47.017179 4953 scope.go:117] "RemoveContainer" containerID="22b262232bab65ba935db174a446585ea647293af26a4516ef93354da9ae05a5" Oct 11 03:12:47 crc kubenswrapper[4953]: I1011 03:12:47.087437 4953 scope.go:117] "RemoveContainer" containerID="1fa8d3fcc2972616b5bf2f8807a67de77ca6ef3f2002c9551329f47e547c49f4" Oct 11 03:12:47 crc kubenswrapper[4953]: I1011 03:12:47.133366 4953 scope.go:117] "RemoveContainer" containerID="bd717d659eff966309fdb8c04428b96b16b9c97691315c49476a8ebd69a0337f" Oct 11 03:12:47 crc kubenswrapper[4953]: I1011 03:12:47.169144 4953 scope.go:117] "RemoveContainer" containerID="f81bb8c88b11c870e065cdf1143bf7b57e272188b07b7bffbd3b23e6d03061e3" Oct 11 03:12:47 crc kubenswrapper[4953]: I1011 03:12:47.235666 4953 scope.go:117] "RemoveContainer" containerID="5991c895b46e2ae00c9f586a26af25cedba7283d65543a7fb9f486bd8ba39bfa" Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.049462 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" podStartSLOduration=23.626257759 podStartE2EDuration="24.049438887s" podCreationTimestamp="2025-10-11 03:12:27 +0000 UTC" firstStartedPulling="2025-10-11 03:12:28.390141256 +0000 UTC m=+1559.323228900" lastFinishedPulling="2025-10-11 03:12:28.813322354 +0000 UTC m=+1559.746410028" observedRunningTime="2025-10-11 03:12:29.510009909 +0000 UTC m=+1560.443097553" watchObservedRunningTime="2025-10-11 03:12:51.049438887 +0000 UTC m=+1581.982526541" Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.066824 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-phsz2"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.076701 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rjhfk"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.089308 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-579kj"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.098159 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rjhfk"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.104255 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-579kj"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.110185 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-phsz2"] Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.822268 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49cfe7db-879e-49c5-a881-df7508c56004" path="/var/lib/kubelet/pods/49cfe7db-879e-49c5-a881-df7508c56004/volumes" Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.823688 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b7bc820-b928-47b5-83b3-3bca09771317" path="/var/lib/kubelet/pods/7b7bc820-b928-47b5-83b3-3bca09771317/volumes" Oct 11 03:12:51 crc kubenswrapper[4953]: I1011 03:12:51.825040 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93572d4d-c90c-484e-a428-47bd0925abba" path="/var/lib/kubelet/pods/93572d4d-c90c-484e-a428-47bd0925abba/volumes" Oct 11 03:12:54 crc kubenswrapper[4953]: I1011 03:12:54.795705 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:12:54 crc kubenswrapper[4953]: 
E1011 03:12:54.796655 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:12:55 crc kubenswrapper[4953]: I1011 03:12:55.047507 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-zg5l4"] Oct 11 03:12:55 crc kubenswrapper[4953]: I1011 03:12:55.090695 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-zg5l4"] Oct 11 03:12:55 crc kubenswrapper[4953]: I1011 03:12:55.815284 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17ba53d5-3bbc-41d3-98d8-6df81e07dc12" path="/var/lib/kubelet/pods/17ba53d5-3bbc-41d3-98d8-6df81e07dc12/volumes" Oct 11 03:13:00 crc kubenswrapper[4953]: I1011 03:13:00.043407 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-19a9-account-create-cj7vv"] Oct 11 03:13:00 crc kubenswrapper[4953]: I1011 03:13:00.057370 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-19a9-account-create-cj7vv"] Oct 11 03:13:01 crc kubenswrapper[4953]: I1011 03:13:01.810354 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="230c8b0d-9a31-45b6-8466-443ee8c0cfd4" path="/var/lib/kubelet/pods/230c8b0d-9a31-45b6-8466-443ee8c0cfd4/volumes" Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.043924 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-eeb5-account-create-djwqq"] Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.053857 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6d14-account-create-vdfm8"] Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.060816 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6d14-account-create-vdfm8"] Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.067425 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-eeb5-account-create-djwqq"] Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.818109 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a" path="/var/lib/kubelet/pods/5b8880b9-7bdd-48ff-8f4b-5d2c07d16b0a/volumes" Oct 11 03:13:03 crc kubenswrapper[4953]: I1011 03:13:03.819842 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5af1453-5d36-4384-860e-9b53ee150124" path="/var/lib/kubelet/pods/e5af1453-5d36-4384-860e-9b53ee150124/volumes" Oct 11 03:13:05 crc kubenswrapper[4953]: I1011 03:13:05.796077 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:13:05 crc kubenswrapper[4953]: E1011 03:13:05.796661 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:13:13 crc kubenswrapper[4953]: I1011 03:13:13.975636 4953 generic.go:334] "Generic (PLEG): 
container finished" podID="53512175-e6aa-4d31-aa79-0d0c651a61dd" containerID="54c6f8773b2c35522c9e9e1b1cb22430d96834e0b846a429c53d3d7817d43036" exitCode=0 Oct 11 03:13:13 crc kubenswrapper[4953]: I1011 03:13:13.975785 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" event={"ID":"53512175-e6aa-4d31-aa79-0d0c651a61dd","Type":"ContainerDied","Data":"54c6f8773b2c35522c9e9e1b1cb22430d96834e0b846a429c53d3d7817d43036"} Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.566421 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.665021 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory\") pod \"53512175-e6aa-4d31-aa79-0d0c651a61dd\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.665172 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key\") pod \"53512175-e6aa-4d31-aa79-0d0c651a61dd\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.665737 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shzdn\" (UniqueName: \"kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn\") pod \"53512175-e6aa-4d31-aa79-0d0c651a61dd\" (UID: \"53512175-e6aa-4d31-aa79-0d0c651a61dd\") " Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.671024 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn" (OuterVolumeSpecName: "kube-api-access-shzdn") pod "53512175-e6aa-4d31-aa79-0d0c651a61dd" (UID: "53512175-e6aa-4d31-aa79-0d0c651a61dd"). InnerVolumeSpecName "kube-api-access-shzdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.692557 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory" (OuterVolumeSpecName: "inventory") pod "53512175-e6aa-4d31-aa79-0d0c651a61dd" (UID: "53512175-e6aa-4d31-aa79-0d0c651a61dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.702556 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "53512175-e6aa-4d31-aa79-0d0c651a61dd" (UID: "53512175-e6aa-4d31-aa79-0d0c651a61dd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.768116 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shzdn\" (UniqueName: \"kubernetes.io/projected/53512175-e6aa-4d31-aa79-0d0c651a61dd-kube-api-access-shzdn\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.768177 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:15 crc kubenswrapper[4953]: I1011 03:13:15.768196 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53512175-e6aa-4d31-aa79-0d0c651a61dd-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.008250 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" event={"ID":"53512175-e6aa-4d31-aa79-0d0c651a61dd","Type":"ContainerDied","Data":"528e0300f4210331537e16d434798ed5aa864012844b89f35df55f375a713cf2"} Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.010273 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="528e0300f4210331537e16d434798ed5aa864012844b89f35df55f375a713cf2" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.010344 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.052408 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-2wgl5"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.063115 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-pkkj5"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.071148 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-2wgl5"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.079586 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-pkkj5"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.095426 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-zkswt"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.101772 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-zkswt"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.112989 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf"] Oct 11 03:13:16 crc kubenswrapper[4953]: E1011 03:13:16.113433 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53512175-e6aa-4d31-aa79-0d0c651a61dd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.113455 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="53512175-e6aa-4d31-aa79-0d0c651a61dd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.113709 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="53512175-e6aa-4d31-aa79-0d0c651a61dd" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.114754 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.118308 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.119942 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.120274 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.120559 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.124307 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf"] Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.276943 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.277066 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.277140 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4qtd\" (UniqueName: \"kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.378278 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.378384 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.378445 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4qtd\" (UniqueName: \"kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" 
(UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.383029 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.383074 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.395268 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4qtd\" (UniqueName: \"kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:16 crc kubenswrapper[4953]: I1011 03:13:16.443266 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:17 crc kubenswrapper[4953]: I1011 03:13:17.007103 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf"] Oct 11 03:13:17 crc kubenswrapper[4953]: I1011 03:13:17.796162 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:13:17 crc kubenswrapper[4953]: E1011 03:13:17.797005 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:13:17 crc kubenswrapper[4953]: I1011 03:13:17.825299 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="421c9157-2796-4c8a-84db-ae0a142d1155" path="/var/lib/kubelet/pods/421c9157-2796-4c8a-84db-ae0a142d1155/volumes" Oct 11 03:13:17 crc kubenswrapper[4953]: I1011 03:13:17.825909 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="485ebf6f-d6da-48bf-9d6a-cb353d9082f5" path="/var/lib/kubelet/pods/485ebf6f-d6da-48bf-9d6a-cb353d9082f5/volumes" Oct 11 03:13:17 crc kubenswrapper[4953]: I1011 03:13:17.826687 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84b8eec5-36ec-46dd-a308-f9d2103cb125" path="/var/lib/kubelet/pods/84b8eec5-36ec-46dd-a308-f9d2103cb125/volumes" Oct 11 03:13:18 crc kubenswrapper[4953]: I1011 03:13:18.037334 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" 
event={"ID":"9bf05143-ee92-4be9-a6a7-0138e0e621c5","Type":"ContainerStarted","Data":"8aaa30bd0a2578dc1fb82afcd47537ba3ce1fc69273bccb8ea6f5ee456c7a3e3"} Oct 11 03:13:18 crc kubenswrapper[4953]: I1011 03:13:18.038235 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" event={"ID":"9bf05143-ee92-4be9-a6a7-0138e0e621c5","Type":"ContainerStarted","Data":"61285e3912def0a4c8a421c1607084016fb27f69794459164642d3b990c417ba"} Oct 11 03:13:18 crc kubenswrapper[4953]: I1011 03:13:18.065240 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" podStartSLOduration=1.425660997 podStartE2EDuration="2.06522479s" podCreationTimestamp="2025-10-11 03:13:16 +0000 UTC" firstStartedPulling="2025-10-11 03:13:17.022911743 +0000 UTC m=+1607.955999397" lastFinishedPulling="2025-10-11 03:13:17.662475546 +0000 UTC m=+1608.595563190" observedRunningTime="2025-10-11 03:13:18.061195018 +0000 UTC m=+1608.994282662" watchObservedRunningTime="2025-10-11 03:13:18.06522479 +0000 UTC m=+1608.998312434" Oct 11 03:13:23 crc kubenswrapper[4953]: I1011 03:13:23.097477 4953 generic.go:334] "Generic (PLEG): container finished" podID="9bf05143-ee92-4be9-a6a7-0138e0e621c5" containerID="8aaa30bd0a2578dc1fb82afcd47537ba3ce1fc69273bccb8ea6f5ee456c7a3e3" exitCode=0 Oct 11 03:13:23 crc kubenswrapper[4953]: I1011 03:13:23.097522 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" event={"ID":"9bf05143-ee92-4be9-a6a7-0138e0e621c5","Type":"ContainerDied","Data":"8aaa30bd0a2578dc1fb82afcd47537ba3ce1fc69273bccb8ea6f5ee456c7a3e3"} Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.581371 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.751835 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4qtd\" (UniqueName: \"kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd\") pod \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.752098 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory\") pod \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.752143 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key\") pod \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\" (UID: \"9bf05143-ee92-4be9-a6a7-0138e0e621c5\") " Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.759979 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd" (OuterVolumeSpecName: "kube-api-access-q4qtd") pod "9bf05143-ee92-4be9-a6a7-0138e0e621c5" (UID: "9bf05143-ee92-4be9-a6a7-0138e0e621c5"). InnerVolumeSpecName "kube-api-access-q4qtd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.784959 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9bf05143-ee92-4be9-a6a7-0138e0e621c5" (UID: "9bf05143-ee92-4be9-a6a7-0138e0e621c5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.790831 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory" (OuterVolumeSpecName: "inventory") pod "9bf05143-ee92-4be9-a6a7-0138e0e621c5" (UID: "9bf05143-ee92-4be9-a6a7-0138e0e621c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.856382 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.856429 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bf05143-ee92-4be9-a6a7-0138e0e621c5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:24 crc kubenswrapper[4953]: I1011 03:13:24.856443 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4qtd\" (UniqueName: \"kubernetes.io/projected/9bf05143-ee92-4be9-a6a7-0138e0e621c5-kube-api-access-q4qtd\") on node \"crc\" DevicePath \"\"" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.119652 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" event={"ID":"9bf05143-ee92-4be9-a6a7-0138e0e621c5","Type":"ContainerDied","Data":"61285e3912def0a4c8a421c1607084016fb27f69794459164642d3b990c417ba"} Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.119709 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61285e3912def0a4c8a421c1607084016fb27f69794459164642d3b990c417ba" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.119764 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.203978 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"] Oct 11 03:13:25 crc kubenswrapper[4953]: E1011 03:13:25.204437 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bf05143-ee92-4be9-a6a7-0138e0e621c5" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.204465 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bf05143-ee92-4be9-a6a7-0138e0e621c5" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.204714 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bf05143-ee92-4be9-a6a7-0138e0e621c5" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.205442 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.207753 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.209374 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.210022 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.211248 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.217571 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"] Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.366956 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.367028 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.367538 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2vsq\" (UniqueName: \"kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.469686 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2vsq\" (UniqueName: \"kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.470083 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.470123 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" 
(UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.476974 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.477185 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.487242 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2vsq\" (UniqueName: \"kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:25 crc kubenswrapper[4953]: I1011 03:13:25.527452 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:13:26 crc kubenswrapper[4953]: I1011 03:13:26.169430 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"] Oct 11 03:13:27 crc kubenswrapper[4953]: I1011 03:13:27.142646 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" event={"ID":"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3","Type":"ContainerStarted","Data":"cec342b479262ba7961555f0901452632cf6452ee3f1f2529f092edc78bfa3a6"} Oct 11 03:13:27 crc kubenswrapper[4953]: I1011 03:13:27.142995 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" event={"ID":"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3","Type":"ContainerStarted","Data":"7324c2536cf381bf0e7e14944b718c02cc40754c2f2d21f782dd8eb10641ef23"} Oct 11 03:13:27 crc kubenswrapper[4953]: I1011 03:13:27.179431 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" podStartSLOduration=1.663769828 podStartE2EDuration="2.179409538s" podCreationTimestamp="2025-10-11 03:13:25 +0000 UTC" firstStartedPulling="2025-10-11 03:13:26.174411661 +0000 UTC m=+1617.107499305" lastFinishedPulling="2025-10-11 03:13:26.690051371 +0000 UTC m=+1617.623139015" observedRunningTime="2025-10-11 03:13:27.163345273 +0000 UTC m=+1618.096432927" watchObservedRunningTime="2025-10-11 03:13:27.179409538 +0000 UTC m=+1618.112497182" Oct 11 03:13:29 crc kubenswrapper[4953]: I1011 03:13:29.806935 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:13:29 crc kubenswrapper[4953]: E1011 03:13:29.809320 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:13:38 crc kubenswrapper[4953]: I1011 03:13:38.052793 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-txcgm"]
Oct 11 03:13:38 crc kubenswrapper[4953]: I1011 03:13:38.060247 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-txcgm"]
Oct 11 03:13:39 crc kubenswrapper[4953]: I1011 03:13:39.805503 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a800d96f-d6e6-4698-8da8-a27dab6454b7" path="/var/lib/kubelet/pods/a800d96f-d6e6-4698-8da8-a27dab6454b7/volumes"
Oct 11 03:13:42 crc kubenswrapper[4953]: I1011 03:13:42.796141 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:13:42 crc kubenswrapper[4953]: E1011 03:13:42.797633 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:13:43 crc kubenswrapper[4953]: I1011 03:13:43.040956 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-n95ss"]
Oct 11 03:13:43 crc kubenswrapper[4953]: I1011 03:13:43.054095 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-n95ss"]
Oct 11 03:13:43 crc kubenswrapper[4953]: I1011 03:13:43.810274 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="244dbbcd-798d-4069-8f37-83a0391a98d6" path="/var/lib/kubelet/pods/244dbbcd-798d-4069-8f37-83a0391a98d6/volumes"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.403665 4953 scope.go:117] "RemoveContainer" containerID="c07062dbee10f578669ce815d4910a8c77159505daef11ca4104593195fab831"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.438718 4953 scope.go:117] "RemoveContainer" containerID="72f93a1505b2b1b33f76372870851c9f47d5c1d8fd28e42def6851caab7d88be"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.472619 4953 scope.go:117] "RemoveContainer" containerID="4688b8bc882972856ba44f9bc23cd2d7814f42cdd5ae2608f664e3eb67f8daab"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.525596 4953 scope.go:117] "RemoveContainer" containerID="26fb9333ff0c208fec20bff39d7d613766b4902d27b3b818bf9b2c6e978be2d5"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.555447 4953 scope.go:117] "RemoveContainer" containerID="8b2e24d5954aee4beaa306dd3ea57854286c6c0168c083729183a15f9d79418d"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.593141 4953 scope.go:117] "RemoveContainer" containerID="e9f236b70ec79c0316d27c17c307d607664c6988da9666c242f762c4aa168384"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.628256 4953 scope.go:117] "RemoveContainer" containerID="3e8d13299e7f80d9203170c5a98bd3fe5d9f86a1cde1a5fabe6df12d86995aec"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.657221 4953 scope.go:117] "RemoveContainer" containerID="27eaa366ade9b3131fe3659b6c21d0deba1eea6513172aaa569b58b77ba2c2ee"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.712837 4953 scope.go:117] "RemoveContainer" containerID="1cba574eb5163399c111c61c5e2ec974e04c14a6e9ed020aca795bfaa6ca676c"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.755884 4953 scope.go:117] "RemoveContainer" containerID="02a904a45945aca7a7118b1402803b912d5a38c67fce97f29b1cec28abd86e31"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.776685 4953 scope.go:117] "RemoveContainer" containerID="999cee9c447248c53ecf119b5c2da3c9ba441c20076fb843035f30b811b85af2"
Oct 11 03:13:47 crc kubenswrapper[4953]: I1011 03:13:47.810598 4953 scope.go:117] "RemoveContainer" containerID="1dc7ad40b3d4ae37563976f780b83bae6c09e17f3358a35dd38f71a71a23e234"
Oct 11 03:13:53 crc kubenswrapper[4953]: I1011 03:13:53.797060 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:13:53 crc kubenswrapper[4953]: E1011 03:13:53.798215 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:13:55 crc kubenswrapper[4953]: I1011 03:13:55.045960 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5xv4s"]
Oct 11 03:13:55 crc kubenswrapper[4953]: I1011 03:13:55.052705 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5xv4s"]
Oct 11 03:13:55 crc kubenswrapper[4953]: I1011 03:13:55.809221 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37b17e82-1098-40f6-9a8b-ff2e863e5559" path="/var/lib/kubelet/pods/37b17e82-1098-40f6-9a8b-ff2e863e5559/volumes"
Oct 11 03:14:05 crc kubenswrapper[4953]: I1011 03:14:05.795772 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:14:05 crc kubenswrapper[4953]: E1011 03:14:05.797180 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.038563 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-nffxt"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.047436 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-dcjsx"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.057251 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-nffxt"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.066174 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-dcjsx"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.074753 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-qxgn4"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.080762 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-qxgn4"]
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.811570 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74c049c7-ed9a-4591-a366-8a18852a3d91" path="/var/lib/kubelet/pods/74c049c7-ed9a-4591-a366-8a18852a3d91/volumes"
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.813009 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3b2030c-9a69-4ea0-9253-9bf9a6aee523" path="/var/lib/kubelet/pods/c3b2030c-9a69-4ea0-9253-9bf9a6aee523/volumes"
Oct 11 03:14:07 crc kubenswrapper[4953]: I1011 03:14:07.814193 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e88244cd-5c67-4f33-b40f-58682d29da8b" path="/var/lib/kubelet/pods/e88244cd-5c67-4f33-b40f-58682d29da8b/volumes"
Oct 11 03:14:17 crc kubenswrapper[4953]: I1011 03:14:17.795485 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:14:17 crc kubenswrapper[4953]: E1011 03:14:17.796732 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.384352 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.386926 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.404302 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.453097 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rfxw\" (UniqueName: \"kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.453203 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.453270 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.555116 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rfxw\" (UniqueName: \"kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.555192 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.555226 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.555697 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.555863 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.574086 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.576254 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.586653 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.595047 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rfxw\" (UniqueName: \"kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw\") pod \"redhat-marketplace-6nm8w\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") " pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.657228 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7szdg\" (UniqueName: \"kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.657292 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.657357 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.710902 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.758859 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7szdg\" (UniqueName: \"kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.758913 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.758966 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.759731 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.760066 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.779662 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7szdg\" (UniqueName: \"kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg\") pod \"redhat-operators-6x8nq\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") " pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:21 crc kubenswrapper[4953]: I1011 03:14:21.937255 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.222513 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.466804 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:22 crc kubenswrapper[4953]: W1011 03:14:22.494148 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa65e0cf_fbc7_478b_beb1_bc744ae16ccb.slice/crio-0bc4f4ca0e3f0605a1dc7ea323e1c285bc60c52fae54ac9a9c768a041f2cae6a WatchSource:0}: Error finding container 0bc4f4ca0e3f0605a1dc7ea323e1c285bc60c52fae54ac9a9c768a041f2cae6a: Status 404 returned error can't find the container with id 0bc4f4ca0e3f0605a1dc7ea323e1c285bc60c52fae54ac9a9c768a041f2cae6a
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.853569 4953 generic.go:334] "Generic (PLEG): container finished" podID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerID="4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99" exitCode=0
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.853704 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerDied","Data":"4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99"}
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.853742 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerStarted","Data":"0bc4f4ca0e3f0605a1dc7ea323e1c285bc60c52fae54ac9a9c768a041f2cae6a"}
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.864592 4953 generic.go:334] "Generic (PLEG): container finished" podID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerID="5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa" exitCode=0
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.864672 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerDied","Data":"5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa"}
Oct 11 03:14:22 crc kubenswrapper[4953]: I1011 03:14:22.864706 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerStarted","Data":"7c53fcb8fe382006f6c6014c967a916d7a0540532cb636a0d7c98503baeada2c"}
Oct 11 03:14:23 crc kubenswrapper[4953]: I1011 03:14:23.877820 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerStarted","Data":"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"}
Oct 11 03:14:23 crc kubenswrapper[4953]: I1011 03:14:23.884682 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerStarted","Data":"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"}
Oct 11 03:14:24 crc kubenswrapper[4953]: I1011 03:14:24.896443 4953 generic.go:334] "Generic (PLEG): container finished" podID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerID="7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f" exitCode=0
Oct 11 03:14:24 crc kubenswrapper[4953]: I1011 03:14:24.896520 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerDied","Data":"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"}
Oct 11 03:14:25 crc kubenswrapper[4953]: I1011 03:14:25.056725 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-51f7-account-create-t84pj"]
Oct 11 03:14:25 crc kubenswrapper[4953]: I1011 03:14:25.071545 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-51f7-account-create-t84pj"]
Oct 11 03:14:25 crc kubenswrapper[4953]: I1011 03:14:25.819816 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8590eb8-db8f-45b8-9917-4bfed4ebec04" path="/var/lib/kubelet/pods/e8590eb8-db8f-45b8-9917-4bfed4ebec04/volumes"
Oct 11 03:14:25 crc kubenswrapper[4953]: I1011 03:14:25.913457 4953 generic.go:334] "Generic (PLEG): container finished" podID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerID="1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662" exitCode=0
Oct 11 03:14:25 crc kubenswrapper[4953]: I1011 03:14:25.913539 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerDied","Data":"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"}
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.051968 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4f27-account-create-x95m2"]
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.062313 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-896b-account-create-cfp9q"]
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.071688 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4f27-account-create-x95m2"]
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.081908 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-896b-account-create-cfp9q"]
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.944319 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerStarted","Data":"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"}
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.949149 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerStarted","Data":"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"}
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.971421 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6x8nq" podStartSLOduration=2.168337687 podStartE2EDuration="5.971394986s" podCreationTimestamp="2025-10-11 03:14:21 +0000 UTC" firstStartedPulling="2025-10-11 03:14:22.858882515 +0000 UTC m=+1673.791970159" lastFinishedPulling="2025-10-11 03:14:26.661939814 +0000 UTC m=+1677.595027458" observedRunningTime="2025-10-11 03:14:26.96283609 +0000 UTC m=+1677.895923734" watchObservedRunningTime="2025-10-11 03:14:26.971394986 +0000 UTC m=+1677.904482630"
Oct 11 03:14:26 crc kubenswrapper[4953]: I1011 03:14:26.984334 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6nm8w" podStartSLOduration=3.522526197 podStartE2EDuration="5.984311711s" podCreationTimestamp="2025-10-11 03:14:21 +0000 UTC" firstStartedPulling="2025-10-11 03:14:22.86897954 +0000 UTC m=+1673.802067184" lastFinishedPulling="2025-10-11 03:14:25.330765014 +0000 UTC m=+1676.263852698" observedRunningTime="2025-10-11 03:14:26.983958712 +0000 UTC m=+1677.917046366" watchObservedRunningTime="2025-10-11 03:14:26.984311711 +0000 UTC m=+1677.917399355"
Oct 11 03:14:27 crc kubenswrapper[4953]: I1011 03:14:27.812956 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="105a7128-d0fb-4e01-a86a-d9305eedd58b" path="/var/lib/kubelet/pods/105a7128-d0fb-4e01-a86a-d9305eedd58b/volumes"
Oct 11 03:14:27 crc kubenswrapper[4953]: I1011 03:14:27.814768 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c547aa6-ce47-47ee-a11d-85f588aab4d3" path="/var/lib/kubelet/pods/9c547aa6-ce47-47ee-a11d-85f588aab4d3/volumes"
Oct 11 03:14:28 crc kubenswrapper[4953]: I1011 03:14:28.796080 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:14:28 crc kubenswrapper[4953]: E1011 03:14:28.796843 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:14:31 crc kubenswrapper[4953]: I1011 03:14:31.711300 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:31 crc kubenswrapper[4953]: I1011 03:14:31.711676 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:31 crc kubenswrapper[4953]: I1011 03:14:31.770549 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:31 crc kubenswrapper[4953]: I1011 03:14:31.938941 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:31 crc kubenswrapper[4953]: I1011 03:14:31.939235 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:32 crc kubenswrapper[4953]: I1011 03:14:32.070947 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:32 crc kubenswrapper[4953]: I1011 03:14:32.163056 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:33 crc kubenswrapper[4953]: I1011 03:14:33.001642 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6x8nq" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="registry-server" probeResult="failure" output=<
Oct 11 03:14:33 crc kubenswrapper[4953]: timeout: failed to connect service ":50051" within 1s
Oct 11 03:14:33 crc kubenswrapper[4953]: >
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.020743 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6nm8w" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="registry-server" containerID="cri-o://ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2" gracePeriod=2
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.447552 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.521856 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content\") pod \"7136d4eb-ae62-47a9-ae1b-788431793ab8\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") "
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.521985 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities\") pod \"7136d4eb-ae62-47a9-ae1b-788431793ab8\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") "
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.522174 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rfxw\" (UniqueName: \"kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw\") pod \"7136d4eb-ae62-47a9-ae1b-788431793ab8\" (UID: \"7136d4eb-ae62-47a9-ae1b-788431793ab8\") "
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.524961 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities" (OuterVolumeSpecName: "utilities") pod "7136d4eb-ae62-47a9-ae1b-788431793ab8" (UID: "7136d4eb-ae62-47a9-ae1b-788431793ab8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.529156 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw" (OuterVolumeSpecName: "kube-api-access-9rfxw") pod "7136d4eb-ae62-47a9-ae1b-788431793ab8" (UID: "7136d4eb-ae62-47a9-ae1b-788431793ab8"). InnerVolumeSpecName "kube-api-access-9rfxw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.548503 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7136d4eb-ae62-47a9-ae1b-788431793ab8" (UID: "7136d4eb-ae62-47a9-ae1b-788431793ab8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.624127 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rfxw\" (UniqueName: \"kubernetes.io/projected/7136d4eb-ae62-47a9-ae1b-788431793ab8-kube-api-access-9rfxw\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.624175 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:34 crc kubenswrapper[4953]: I1011 03:14:34.624244 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7136d4eb-ae62-47a9-ae1b-788431793ab8-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.034493 4953 generic.go:334] "Generic (PLEG): container finished" podID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerID="ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2" exitCode=0
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.034597 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerDied","Data":"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"}
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.035725 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6nm8w" event={"ID":"7136d4eb-ae62-47a9-ae1b-788431793ab8","Type":"ContainerDied","Data":"7c53fcb8fe382006f6c6014c967a916d7a0540532cb636a0d7c98503baeada2c"}
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.034632 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6nm8w"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.035753 4953 scope.go:117] "RemoveContainer" containerID="ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.055566 4953 scope.go:117] "RemoveContainer" containerID="7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.078696 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.080715 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6nm8w"]
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.111075 4953 scope.go:117] "RemoveContainer" containerID="5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.148729 4953 scope.go:117] "RemoveContainer" containerID="ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"
Oct 11 03:14:35 crc kubenswrapper[4953]: E1011 03:14:35.149526 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2\": container with ID starting with ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2 not found: ID does not exist" containerID="ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.149564 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2"} err="failed to get container status \"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2\": rpc error: code = NotFound desc = could not find container \"ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2\": container with ID starting with ae19e7cbe51eae470afab468c62b8b00156386367cddbb526fcc92681ce475b2 not found: ID does not exist"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.149591 4953 scope.go:117] "RemoveContainer" containerID="7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"
Oct 11 03:14:35 crc kubenswrapper[4953]: E1011 03:14:35.149975 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f\": container with ID starting with 7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f not found: ID does not exist" containerID="7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.150002 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f"} err="failed to get container status \"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f\": rpc error: code = NotFound desc = could not find container \"7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f\": container with ID starting with 7f075f5e3a5604d2981c0d576722ff00e30c2f955fed6f652c58908916818f8f not found: ID does not exist"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.150033 4953 scope.go:117] "RemoveContainer" containerID="5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa"
Oct 11 03:14:35 crc kubenswrapper[4953]: E1011 03:14:35.150348 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa\": container with ID starting with 5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa not found: ID does not exist" containerID="5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.150373 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa"} err="failed to get container status \"5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa\": rpc error: code = NotFound desc = could not find container \"5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa\": container with ID starting with 5d06802de330cd04a90cba90a6c104393960ddb9ef7ae7a5663a27b3a258a0fa not found: ID does not exist"
Oct 11 03:14:35 crc kubenswrapper[4953]: I1011 03:14:35.812863 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" path="/var/lib/kubelet/pods/7136d4eb-ae62-47a9-ae1b-788431793ab8/volumes"
Oct 11 03:14:40 crc kubenswrapper[4953]: I1011 03:14:40.795916 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:14:40 crc kubenswrapper[4953]: E1011 03:14:40.797885 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:14:42 crc kubenswrapper[4953]: I1011 03:14:42.021772 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:42 crc kubenswrapper[4953]: I1011 03:14:42.083332 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:42 crc kubenswrapper[4953]: I1011 03:14:42.262112 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.175057 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6x8nq" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="registry-server" containerID="cri-o://d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a" gracePeriod=2
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.650355 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.751508 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content\") pod \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") "
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.751686 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7szdg\" (UniqueName: \"kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg\") pod \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") "
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.751755 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities\") pod \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\" (UID: \"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb\") "
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.752247 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities" (OuterVolumeSpecName: "utilities") pod "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" (UID: "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.752521 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.757853 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg" (OuterVolumeSpecName: "kube-api-access-7szdg") pod "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" (UID: "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb"). InnerVolumeSpecName "kube-api-access-7szdg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.828956 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" (UID: "fa65e0cf-fbc7-478b-beb1-bc744ae16ccb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.854434 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:43 crc kubenswrapper[4953]: I1011 03:14:43.854485 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7szdg\" (UniqueName: \"kubernetes.io/projected/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb-kube-api-access-7szdg\") on node \"crc\" DevicePath \"\""
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.188699 4953 generic.go:334] "Generic (PLEG): container finished" podID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerID="d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a" exitCode=0
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.188909 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerDied","Data":"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"}
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.189824 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6x8nq" event={"ID":"fa65e0cf-fbc7-478b-beb1-bc744ae16ccb","Type":"ContainerDied","Data":"0bc4f4ca0e3f0605a1dc7ea323e1c285bc60c52fae54ac9a9c768a041f2cae6a"}
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.188996 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6x8nq"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.189861 4953 scope.go:117] "RemoveContainer" containerID="d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.223684 4953 scope.go:117] "RemoveContainer" containerID="1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.243682 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.252218 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6x8nq"]
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.253790 4953 scope.go:117] "RemoveContainer" containerID="4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.297843 4953 scope.go:117] "RemoveContainer" containerID="d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"
Oct 11 03:14:44 crc kubenswrapper[4953]: E1011 03:14:44.298338 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a\": container with ID starting with d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a not found: ID does not exist" containerID="d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.298387 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a"} err="failed to get container status \"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a\": rpc error: code = NotFound desc = could not find container \"d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a\": container with ID starting with d981c3aca698d2b29b0f15285e7394243b6b858aca963b981139b9bd8ea65f5a not found: ID does not exist"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.298408 4953 scope.go:117] "RemoveContainer" containerID="1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"
Oct 11 03:14:44 crc kubenswrapper[4953]: E1011 03:14:44.298698 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662\": container with ID starting with 1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662 not found: ID does not exist" containerID="1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.298722 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662"} err="failed to get container status \"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662\": rpc error: code = NotFound desc = could not find container \"1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662\": container with ID starting with 1ada4d8492090391d979c480ddca082b92352b04b5fa7a82e6a108ae5ea63662 not found: ID does not exist"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.298735 4953 scope.go:117] "RemoveContainer" containerID="4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99"
Oct 11 03:14:44 crc kubenswrapper[4953]: E1011 03:14:44.299083 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99\": container with ID starting with 4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99 not found: ID does not exist" containerID="4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99"
Oct 11 03:14:44 crc kubenswrapper[4953]: I1011 03:14:44.299111 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99"} err="failed to get container status \"4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99\": rpc error: code = NotFound desc = could not find container \"4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99\": container with ID starting with 4c50c366312b8170a18e3f89b39fed487dbb49b303b501cd7318617308b8dd99 not found: ID does not exist"
Oct 11 03:14:45 crc kubenswrapper[4953]: I1011 03:14:45.833291 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" path="/var/lib/kubelet/pods/fa65e0cf-fbc7-478b-beb1-bc744ae16ccb/volumes"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.047404 4953 scope.go:117] "RemoveContainer" containerID="e7fd23c4fbb9377d9b6440704658cf9ae063f20bfa63b936b1e69d01670f2b44"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.069908 4953 scope.go:117] "RemoveContainer" containerID="b78d5429605377d8a93204dd0cf0ddbffb46ac8f11b4155f41cb9d306478c3d4"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.142310 4953 scope.go:117] "RemoveContainer" containerID="0920be9a5b17748cc144d82da50835e7d388ec1bb67bfd03e11b0b1b706b777d"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.187648 4953 scope.go:117] "RemoveContainer" containerID="58e34fa26fc115520c322fe73fa97e032b15f5bca52328e61bfa8306f6362364"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.221389 4953 scope.go:117] "RemoveContainer" containerID="76e6f04613ca2f0a55487af8a9fc0eeda75161b1a86fd0eec89bed9bdc5711a7"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.256239 4953 scope.go:117] "RemoveContainer" containerID="503ac283086d6618a753d7690294f54d4e579cc28d6ad20f3c4ce4d2f72c0dd3"
Oct 11 03:14:48 crc kubenswrapper[4953]: I1011 03:14:48.309836 4953 scope.go:117] "RemoveContainer" containerID="f6c3cc08ac704e094ce2446a9df093b9941b90ff5f070bead94fd17f67a5acbb"
Oct 11 03:14:49 crc kubenswrapper[4953]: I1011 03:14:49.045225 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr9q7"]
Oct 11 03:14:49 crc kubenswrapper[4953]: I1011 03:14:49.061744 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nr9q7"]
Oct 11 03:14:49 crc kubenswrapper[4953]: I1011 03:14:49.806829 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3af0283-8529-49e1-ad8a-984617204bed" path="/var/lib/kubelet/pods/e3af0283-8529-49e1-ad8a-984617204bed/volumes"
Oct 11 03:14:54 crc kubenswrapper[4953]: I1011 03:14:54.796199 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:14:54 crc kubenswrapper[4953]: E1011 03:14:54.797242 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.156223 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"]
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157076 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="extract-content"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157089 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="extract-content"
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157101 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="extract-utilities"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157107 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="extract-utilities"
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157121 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157126 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157135 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="extract-content"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157141 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="extract-content"
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157152 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="extract-utilities"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157157 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="extract-utilities"
Oct 11 03:15:00 crc kubenswrapper[4953]: E1011 03:15:00.157178 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157184 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157355 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7136d4eb-ae62-47a9-ae1b-788431793ab8" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.157371 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa65e0cf-fbc7-478b-beb1-bc744ae16ccb" containerName="registry-server"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.158022 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.162639 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.166394 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.168770 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"]
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.271441 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.271507 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdhdv\" (UniqueName: \"kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.271575 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.373657 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.373807 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.373860 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdhdv\" (UniqueName: \"kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.375917 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.390110 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.390328 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdhdv\" (UniqueName: \"kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv\") pod \"collect-profiles-29335875-gk84g\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.479516 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:00 crc kubenswrapper[4953]: I1011 03:15:00.910300 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"]
Oct 11 03:15:01 crc kubenswrapper[4953]: I1011 03:15:01.354451 4953 generic.go:334] "Generic (PLEG): container finished" podID="50a20ae2-84b9-427c-a9be-419da263288a" containerID="d663d9b16b41ee640d64a358fc6a6bda62c863f6ab13ff1b7e9039822521cd56" exitCode=0
Oct 11 03:15:01 crc kubenswrapper[4953]: I1011 03:15:01.354543 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g" event={"ID":"50a20ae2-84b9-427c-a9be-419da263288a","Type":"ContainerDied","Data":"d663d9b16b41ee640d64a358fc6a6bda62c863f6ab13ff1b7e9039822521cd56"}
Oct 11 03:15:01 crc kubenswrapper[4953]: I1011 03:15:01.354814 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g" event={"ID":"50a20ae2-84b9-427c-a9be-419da263288a","Type":"ContainerStarted","Data":"729f2b8b364bb4e5c45944de4951669f443493376739338d290ec0e26ad4a07a"}
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.641721 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.715699 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume\") pod \"50a20ae2-84b9-427c-a9be-419da263288a\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") "
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.715900 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume\") pod \"50a20ae2-84b9-427c-a9be-419da263288a\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") "
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.715939 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdhdv\" (UniqueName: \"kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv\") pod \"50a20ae2-84b9-427c-a9be-419da263288a\" (UID: \"50a20ae2-84b9-427c-a9be-419da263288a\") "
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.716753 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume" (OuterVolumeSpecName: "config-volume") pod "50a20ae2-84b9-427c-a9be-419da263288a" (UID: "50a20ae2-84b9-427c-a9be-419da263288a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.722675 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv" (OuterVolumeSpecName: "kube-api-access-mdhdv") pod "50a20ae2-84b9-427c-a9be-419da263288a" (UID: "50a20ae2-84b9-427c-a9be-419da263288a"). InnerVolumeSpecName "kube-api-access-mdhdv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.722687 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "50a20ae2-84b9-427c-a9be-419da263288a" (UID: "50a20ae2-84b9-427c-a9be-419da263288a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.826287 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/50a20ae2-84b9-427c-a9be-419da263288a-config-volume\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.826501 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/50a20ae2-84b9-427c-a9be-419da263288a-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:02 crc kubenswrapper[4953]: I1011 03:15:02.826511 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdhdv\" (UniqueName: \"kubernetes.io/projected/50a20ae2-84b9-427c-a9be-419da263288a-kube-api-access-mdhdv\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:03 crc kubenswrapper[4953]: I1011 03:15:03.369901 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g" event={"ID":"50a20ae2-84b9-427c-a9be-419da263288a","Type":"ContainerDied","Data":"729f2b8b364bb4e5c45944de4951669f443493376739338d290ec0e26ad4a07a"}
Oct 11 03:15:03 crc kubenswrapper[4953]: I1011 03:15:03.369938 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="729f2b8b364bb4e5c45944de4951669f443493376739338d290ec0e26ad4a07a"
Oct 11 03:15:03 crc kubenswrapper[4953]: I1011 03:15:03.369967 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"
Oct 11 03:15:05 crc kubenswrapper[4953]: I1011 03:15:05.795975 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:15:05 crc kubenswrapper[4953]: E1011 03:15:05.796772 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:15:11 crc kubenswrapper[4953]: I1011 03:15:11.061717 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-p9vc4"]
Oct 11 03:15:11 crc kubenswrapper[4953]: I1011 03:15:11.071595 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-p9vc4"]
Oct 11 03:15:11 crc kubenswrapper[4953]: I1011 03:15:11.811480 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c44d49-ff7d-45bc-af7e-55eee19b672b" path="/var/lib/kubelet/pods/07c44d49-ff7d-45bc-af7e-55eee19b672b/volumes"
Oct 11 03:15:14 crc kubenswrapper[4953]: I1011 03:15:14.029748 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-m2j4v"]
Oct 11 03:15:14 crc kubenswrapper[4953]: I1011 03:15:14.035006 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-m2j4v"]
Oct 11 03:15:15 crc kubenswrapper[4953]: I1011 03:15:15.813069 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4149b77-aada-42e3-b8ad-4392fb1e7c0d" path="/var/lib/kubelet/pods/b4149b77-aada-42e3-b8ad-4392fb1e7c0d/volumes"
Oct 11 03:15:17 crc kubenswrapper[4953]: I1011 03:15:17.795789 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:15:17 crc kubenswrapper[4953]: E1011 03:15:17.796672 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:15:28 crc kubenswrapper[4953]: I1011 03:15:28.796497 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:15:28 crc kubenswrapper[4953]: E1011 03:15:28.797429 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:15:41 crc kubenswrapper[4953]: I1011 03:15:41.795970 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b"
Oct 11 03:15:41 crc kubenswrapper[4953]: E1011 03:15:41.797241 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:15:46 crc kubenswrapper[4953]: I1011 03:15:46.850803 4953 generic.go:334] "Generic (PLEG): container finished" podID="baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" containerID="cec342b479262ba7961555f0901452632cf6452ee3f1f2529f092edc78bfa3a6" exitCode=2
Oct 11 03:15:46 crc kubenswrapper[4953]: I1011 03:15:46.850896 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" event={"ID":"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3","Type":"ContainerDied","Data":"cec342b479262ba7961555f0901452632cf6452ee3f1f2529f092edc78bfa3a6"}
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.324336 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.395844 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key\") pod \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") "
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.396036 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2vsq\" (UniqueName: \"kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq\") pod \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") "
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.396198 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory\") pod \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\" (UID: \"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3\") "
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.403801 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq" (OuterVolumeSpecName: "kube-api-access-b2vsq") pod "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" (UID: "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3"). InnerVolumeSpecName "kube-api-access-b2vsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.429310 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" (UID: "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.442313 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory" (OuterVolumeSpecName: "inventory") pod "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" (UID: "baae5cb7-14c0-4367-ab6f-ecb4a323c6c3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.472249 4953 scope.go:117] "RemoveContainer" containerID="4d61c9be24d437e306c830d10952fc1442143406d7410bcddefa1790e6fac94b"
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.498948 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.499043 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.499059 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2vsq\" (UniqueName: \"kubernetes.io/projected/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3-kube-api-access-b2vsq\") on node \"crc\" DevicePath \"\""
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.568549 4953 scope.go:117] "RemoveContainer" containerID="88ee3e6018fa7012cc7681a03d446b012eb60108987c9c7c24f472d52bf8ed0b"
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.615911 4953 scope.go:117] "RemoveContainer" containerID="8ed69d6ce145394755f7621e0905e63cf33d227d43b073cef013912d3ad0747d"
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.878243 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" event={"ID":"baae5cb7-14c0-4367-ab6f-ecb4a323c6c3","Type":"ContainerDied","Data":"7324c2536cf381bf0e7e14944b718c02cc40754c2f2d21f782dd8eb10641ef23"}
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.878316 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7324c2536cf381bf0e7e14944b718c02cc40754c2f2d21f782dd8eb10641ef23"
Oct 11 03:15:48 crc kubenswrapper[4953]: I1011 03:15:48.878379 4953 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.067715 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc"] Oct 11 03:15:55 crc kubenswrapper[4953]: E1011 03:15:55.068979 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a20ae2-84b9-427c-a9be-419da263288a" containerName="collect-profiles" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.069005 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a20ae2-84b9-427c-a9be-419da263288a" containerName="collect-profiles" Oct 11 03:15:55 crc kubenswrapper[4953]: E1011 03:15:55.069032 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.069049 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.069419 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.069471 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="50a20ae2-84b9-427c-a9be-419da263288a" containerName="collect-profiles" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.076048 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.078284 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.080406 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc"] Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.081107 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.081217 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.081665 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.257421 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx5vg\" (UniqueName: \"kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.257565 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.258011 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.361179 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx5vg\" (UniqueName: \"kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.361389 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.361526 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.374994 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.379177 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.387868 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx5vg\" (UniqueName: \"kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.418319 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.796760 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:15:55 crc kubenswrapper[4953]: E1011 03:15:55.797814 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.823777 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc"] Oct 11 03:15:55 crc kubenswrapper[4953]: I1011 03:15:55.958368 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" event={"ID":"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3","Type":"ContainerStarted","Data":"15547f25915146e9f4247ec32d30d6d98d6ef1f0734e622b126a256131849b5d"} Oct 11 03:15:56 crc kubenswrapper[4953]: I1011 03:15:56.970893 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" event={"ID":"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3","Type":"ContainerStarted","Data":"e643b7e79ff51a390869993e1ae2fc62c6bbd5768ffb5a949291991419d24752"} Oct 11 03:15:56 crc kubenswrapper[4953]: I1011 03:15:56.988976 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" podStartSLOduration=1.5608039900000001 podStartE2EDuration="1.988956234s" podCreationTimestamp="2025-10-11 03:15:55 +0000 UTC" firstStartedPulling="2025-10-11 03:15:55.834424947 +0000 UTC m=+1766.767512581" lastFinishedPulling="2025-10-11 03:15:56.262577171 +0000 UTC m=+1767.195664825" observedRunningTime="2025-10-11 03:15:56.985159309 +0000 UTC m=+1767.918246963" watchObservedRunningTime="2025-10-11 03:15:56.988956234 +0000 UTC m=+1767.922043878" Oct 11 03:15:57 crc kubenswrapper[4953]: I1011 03:15:57.050800 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rn257"] Oct 11 03:15:57 crc kubenswrapper[4953]: I1011 03:15:57.063638 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rn257"] Oct 11 03:15:57 crc kubenswrapper[4953]: I1011 03:15:57.806323 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ea69ea4-ed2c-4527-94de-981b609f1cca" path="/var/lib/kubelet/pods/3ea69ea4-ed2c-4527-94de-981b609f1cca/volumes" Oct 11 03:16:07 crc kubenswrapper[4953]: I1011 03:16:07.796885 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:16:07 crc kubenswrapper[4953]: E1011 03:16:07.798407 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 
11 03:16:21 crc kubenswrapper[4953]: I1011 03:16:21.796819 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:16:21 crc kubenswrapper[4953]: E1011 03:16:21.798196 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:16:35 crc kubenswrapper[4953]: I1011 03:16:35.795939 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:16:35 crc kubenswrapper[4953]: E1011 03:16:35.796993 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:16:47 crc kubenswrapper[4953]: I1011 03:16:47.796936 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:16:47 crc kubenswrapper[4953]: E1011 03:16:47.798171 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:16:48 crc kubenswrapper[4953]: I1011 03:16:48.746126 4953 scope.go:117] "RemoveContainer" containerID="5ed42581fe9042b94858040ec0efad065ffe12a0f61c84ff6bb546094183dcfb" Oct 11 03:16:50 crc kubenswrapper[4953]: I1011 03:16:50.693150 4953 generic.go:334] "Generic (PLEG): container finished" podID="9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" containerID="e643b7e79ff51a390869993e1ae2fc62c6bbd5768ffb5a949291991419d24752" exitCode=0 Oct 11 03:16:50 crc kubenswrapper[4953]: I1011 03:16:50.693257 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" event={"ID":"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3","Type":"ContainerDied","Data":"e643b7e79ff51a390869993e1ae2fc62c6bbd5768ffb5a949291991419d24752"} Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.193483 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.320717 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory\") pod \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.320905 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key\") pod \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.320942 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx5vg\" (UniqueName: \"kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg\") pod \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\" (UID: \"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3\") " Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.331692 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg" (OuterVolumeSpecName: "kube-api-access-fx5vg") pod "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" (UID: "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3"). InnerVolumeSpecName "kube-api-access-fx5vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.349757 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory" (OuterVolumeSpecName: "inventory") pod "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" (UID: "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.350714 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" (UID: "9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.424149 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.424182 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.424191 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx5vg\" (UniqueName: \"kubernetes.io/projected/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3-kube-api-access-fx5vg\") on node \"crc\" DevicePath \"\"" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.719987 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" event={"ID":"9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3","Type":"ContainerDied","Data":"15547f25915146e9f4247ec32d30d6d98d6ef1f0734e622b126a256131849b5d"} Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.720030 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15547f25915146e9f4247ec32d30d6d98d6ef1f0734e622b126a256131849b5d" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.720101 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.841083 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4s56g"] Oct 11 03:16:52 crc kubenswrapper[4953]: E1011 03:16:52.841507 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.841541 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.841790 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.842457 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.848047 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.849114 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.849424 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.850022 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.856070 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4s56g"] Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.932194 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.933239 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:52 crc kubenswrapper[4953]: I1011 03:16:52.933410 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvpjr\" (UniqueName: \"kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.035612 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.035813 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.035849 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvpjr\" (UniqueName: \"kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc 
kubenswrapper[4953]: I1011 03:16:53.040782 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.041178 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.107414 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvpjr\" (UniqueName: \"kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr\") pod \"ssh-known-hosts-edpm-deployment-4s56g\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.166065 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:16:53 crc kubenswrapper[4953]: I1011 03:16:53.768405 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4s56g"] Oct 11 03:16:54 crc kubenswrapper[4953]: I1011 03:16:54.743220 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" event={"ID":"8046051a-f405-4a56-95dd-a89214b321f1","Type":"ContainerStarted","Data":"d19d84b3d7e5326e5e61b4acb322d65ef87c1c907940b0c5815849fb00fa03f3"} Oct 11 03:16:54 crc kubenswrapper[4953]: I1011 03:16:54.743722 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" event={"ID":"8046051a-f405-4a56-95dd-a89214b321f1","Type":"ContainerStarted","Data":"46686e519cb9454f7ded1375e2de3aec6e3a6e2c3d6152a266ad845db1de1a3d"} Oct 11 03:16:54 crc kubenswrapper[4953]: I1011 03:16:54.772644 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" podStartSLOduration=2.155198724 podStartE2EDuration="2.772590899s" podCreationTimestamp="2025-10-11 03:16:52 +0000 UTC" firstStartedPulling="2025-10-11 03:16:53.783321128 +0000 UTC m=+1824.716408812" lastFinishedPulling="2025-10-11 03:16:54.400713343 +0000 UTC m=+1825.333800987" observedRunningTime="2025-10-11 03:16:54.763188622 +0000 UTC m=+1825.696276266" watchObservedRunningTime="2025-10-11 03:16:54.772590899 +0000 UTC m=+1825.705678563" Oct 11 03:16:58 crc kubenswrapper[4953]: I1011 03:16:58.795464 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:16:58 crc kubenswrapper[4953]: E1011 03:16:58.797306 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:17:02 crc 
kubenswrapper[4953]: I1011 03:17:02.875630 4953 generic.go:334] "Generic (PLEG): container finished" podID="8046051a-f405-4a56-95dd-a89214b321f1" containerID="d19d84b3d7e5326e5e61b4acb322d65ef87c1c907940b0c5815849fb00fa03f3" exitCode=0 Oct 11 03:17:02 crc kubenswrapper[4953]: I1011 03:17:02.875710 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" event={"ID":"8046051a-f405-4a56-95dd-a89214b321f1","Type":"ContainerDied","Data":"d19d84b3d7e5326e5e61b4acb322d65ef87c1c907940b0c5815849fb00fa03f3"} Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.359891 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.535508 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam\") pod \"8046051a-f405-4a56-95dd-a89214b321f1\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.536231 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0\") pod \"8046051a-f405-4a56-95dd-a89214b321f1\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.536292 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvpjr\" (UniqueName: \"kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr\") pod \"8046051a-f405-4a56-95dd-a89214b321f1\" (UID: \"8046051a-f405-4a56-95dd-a89214b321f1\") " Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.544447 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr" (OuterVolumeSpecName: "kube-api-access-zvpjr") pod "8046051a-f405-4a56-95dd-a89214b321f1" (UID: "8046051a-f405-4a56-95dd-a89214b321f1"). InnerVolumeSpecName "kube-api-access-zvpjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.564129 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "8046051a-f405-4a56-95dd-a89214b321f1" (UID: "8046051a-f405-4a56-95dd-a89214b321f1"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.564166 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "8046051a-f405-4a56-95dd-a89214b321f1" (UID: "8046051a-f405-4a56-95dd-a89214b321f1"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.639039 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.639079 4953 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/8046051a-f405-4a56-95dd-a89214b321f1-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.639092 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvpjr\" (UniqueName: \"kubernetes.io/projected/8046051a-f405-4a56-95dd-a89214b321f1-kube-api-access-zvpjr\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.903210 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" event={"ID":"8046051a-f405-4a56-95dd-a89214b321f1","Type":"ContainerDied","Data":"46686e519cb9454f7ded1375e2de3aec6e3a6e2c3d6152a266ad845db1de1a3d"} Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.903284 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46686e519cb9454f7ded1375e2de3aec6e3a6e2c3d6152a266ad845db1de1a3d" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.903372 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-4s56g" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.996742 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq"] Oct 11 03:17:04 crc kubenswrapper[4953]: E1011 03:17:04.997359 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8046051a-f405-4a56-95dd-a89214b321f1" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.997385 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="8046051a-f405-4a56-95dd-a89214b321f1" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.997718 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="8046051a-f405-4a56-95dd-a89214b321f1" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:17:04 crc kubenswrapper[4953]: I1011 03:17:04.998593 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.003051 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.006045 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.008080 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.008424 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.011592 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq"] Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.158840 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.159984 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.160781 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbchx\" (UniqueName: \"kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.262124 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbchx\" (UniqueName: \"kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.262221 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.262249 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.270165 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.278661 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.295142 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbchx\" (UniqueName: \"kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c7btq\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.320831 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.757470 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq"] Oct 11 03:17:05 crc kubenswrapper[4953]: I1011 03:17:05.916300 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" event={"ID":"2f235983-7690-4d9b-89a7-6f9d873b80b4","Type":"ContainerStarted","Data":"2b314b6b2cc2e39ff85172ea3aca0f2478b6ccf73df22f4117dd735f08a110eb"} Oct 11 03:17:06 crc kubenswrapper[4953]: I1011 03:17:06.936374 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" event={"ID":"2f235983-7690-4d9b-89a7-6f9d873b80b4","Type":"ContainerStarted","Data":"534df06864b09e04009755ea8df43728c31ccade5483ea4d7df96caed9b59fc7"} Oct 11 03:17:06 crc kubenswrapper[4953]: I1011 03:17:06.958033 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" podStartSLOduration=2.5346458099999998 podStartE2EDuration="2.958003983s" podCreationTimestamp="2025-10-11 03:17:04 +0000 UTC" firstStartedPulling="2025-10-11 03:17:05.773526592 +0000 UTC m=+1836.706614266" lastFinishedPulling="2025-10-11 03:17:06.196884755 +0000 UTC m=+1837.129972439" observedRunningTime="2025-10-11 03:17:06.955796858 +0000 UTC m=+1837.888884562" watchObservedRunningTime="2025-10-11 03:17:06.958003983 +0000 UTC m=+1837.891091667" Oct 11 03:17:12 crc kubenswrapper[4953]: I1011 03:17:12.795317 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:17:14 crc kubenswrapper[4953]: I1011 03:17:14.016063 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31"} Oct 11 
03:17:16 crc kubenswrapper[4953]: I1011 03:17:16.042277 4953 generic.go:334] "Generic (PLEG): container finished" podID="2f235983-7690-4d9b-89a7-6f9d873b80b4" containerID="534df06864b09e04009755ea8df43728c31ccade5483ea4d7df96caed9b59fc7" exitCode=0 Oct 11 03:17:16 crc kubenswrapper[4953]: I1011 03:17:16.042400 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" event={"ID":"2f235983-7690-4d9b-89a7-6f9d873b80b4","Type":"ContainerDied","Data":"534df06864b09e04009755ea8df43728c31ccade5483ea4d7df96caed9b59fc7"} Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.527432 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.646405 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbchx\" (UniqueName: \"kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx\") pod \"2f235983-7690-4d9b-89a7-6f9d873b80b4\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.647730 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory\") pod \"2f235983-7690-4d9b-89a7-6f9d873b80b4\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.648065 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key\") pod \"2f235983-7690-4d9b-89a7-6f9d873b80b4\" (UID: \"2f235983-7690-4d9b-89a7-6f9d873b80b4\") " Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.656292 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx" (OuterVolumeSpecName: "kube-api-access-sbchx") pod "2f235983-7690-4d9b-89a7-6f9d873b80b4" (UID: "2f235983-7690-4d9b-89a7-6f9d873b80b4"). InnerVolumeSpecName "kube-api-access-sbchx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.691953 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory" (OuterVolumeSpecName: "inventory") pod "2f235983-7690-4d9b-89a7-6f9d873b80b4" (UID: "2f235983-7690-4d9b-89a7-6f9d873b80b4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.701965 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2f235983-7690-4d9b-89a7-6f9d873b80b4" (UID: "2f235983-7690-4d9b-89a7-6f9d873b80b4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.752221 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbchx\" (UniqueName: \"kubernetes.io/projected/2f235983-7690-4d9b-89a7-6f9d873b80b4-kube-api-access-sbchx\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.752267 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:17 crc kubenswrapper[4953]: I1011 03:17:17.752281 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2f235983-7690-4d9b-89a7-6f9d873b80b4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.066757 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" event={"ID":"2f235983-7690-4d9b-89a7-6f9d873b80b4","Type":"ContainerDied","Data":"2b314b6b2cc2e39ff85172ea3aca0f2478b6ccf73df22f4117dd735f08a110eb"} Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.066808 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b314b6b2cc2e39ff85172ea3aca0f2478b6ccf73df22f4117dd735f08a110eb" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.066807 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.187023 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs"] Oct 11 03:17:18 crc kubenswrapper[4953]: E1011 03:17:18.187465 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f235983-7690-4d9b-89a7-6f9d873b80b4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.187487 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f235983-7690-4d9b-89a7-6f9d873b80b4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.187725 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f235983-7690-4d9b-89a7-6f9d873b80b4" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.188428 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.191381 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.191427 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.192689 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.194571 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.206829 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs"] Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.265442 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.265511 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw8nd\" (UniqueName: \"kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.265753 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.367921 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.368038 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.368070 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw8nd\" (UniqueName: \"kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: 
\"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.379781 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.379860 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.389322 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw8nd\" (UniqueName: \"kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.515715 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:18 crc kubenswrapper[4953]: I1011 03:17:18.908182 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs"] Oct 11 03:17:19 crc kubenswrapper[4953]: I1011 03:17:19.076941 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" event={"ID":"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31","Type":"ContainerStarted","Data":"e39fa4e55279be9570a5742b147041fcb354398d21dc2f22edcfe6e31c8d0883"} Oct 11 03:17:20 crc kubenswrapper[4953]: I1011 03:17:20.091147 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" event={"ID":"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31","Type":"ContainerStarted","Data":"4902b756d16a2d757f23db0d305dd72b3b77d24818e58b3e74e50e99b00d2dc3"} Oct 11 03:17:20 crc kubenswrapper[4953]: I1011 03:17:20.112835 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" podStartSLOduration=1.676627211 podStartE2EDuration="2.112809868s" podCreationTimestamp="2025-10-11 03:17:18 +0000 UTC" firstStartedPulling="2025-10-11 03:17:18.926259363 +0000 UTC m=+1849.859347017" lastFinishedPulling="2025-10-11 03:17:19.36244203 +0000 UTC m=+1850.295529674" observedRunningTime="2025-10-11 03:17:20.111525005 +0000 UTC m=+1851.044612689" watchObservedRunningTime="2025-10-11 03:17:20.112809868 +0000 UTC m=+1851.045897532" Oct 11 03:17:30 crc kubenswrapper[4953]: I1011 03:17:30.216171 4953 generic.go:334] "Generic (PLEG): container finished" podID="1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" containerID="4902b756d16a2d757f23db0d305dd72b3b77d24818e58b3e74e50e99b00d2dc3" exitCode=0 Oct 11 03:17:30 crc kubenswrapper[4953]: I1011 03:17:30.216252 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" 
event={"ID":"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31","Type":"ContainerDied","Data":"4902b756d16a2d757f23db0d305dd72b3b77d24818e58b3e74e50e99b00d2dc3"} Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.638981 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.782021 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key\") pod \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.782150 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory\") pod \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.782289 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw8nd\" (UniqueName: \"kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd\") pod \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\" (UID: \"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31\") " Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.792866 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd" (OuterVolumeSpecName: "kube-api-access-sw8nd") pod "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" (UID: "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31"). InnerVolumeSpecName "kube-api-access-sw8nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.822012 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" (UID: "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.822968 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory" (OuterVolumeSpecName: "inventory") pod "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" (UID: "1a75b6ba-a5d4-4153-b7c4-95673fd9ce31"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.885527 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw8nd\" (UniqueName: \"kubernetes.io/projected/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-kube-api-access-sw8nd\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.885566 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:31 crc kubenswrapper[4953]: I1011 03:17:31.885577 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:17:32 crc kubenswrapper[4953]: I1011 03:17:32.241440 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" event={"ID":"1a75b6ba-a5d4-4153-b7c4-95673fd9ce31","Type":"ContainerDied","Data":"e39fa4e55279be9570a5742b147041fcb354398d21dc2f22edcfe6e31c8d0883"} Oct 11 03:17:32 crc kubenswrapper[4953]: I1011 03:17:32.241505 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs" Oct 11 03:17:32 crc kubenswrapper[4953]: I1011 03:17:32.241509 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e39fa4e55279be9570a5742b147041fcb354398d21dc2f22edcfe6e31c8d0883" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.317092 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.318117 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.642874 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:41 crc kubenswrapper[4953]: E1011 03:19:41.645448 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.645476 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.645695 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.647500 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.660563 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.750663 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.751023 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.751172 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj489\" (UniqueName: \"kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.853774 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.853913 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.853946 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj489\" (UniqueName: \"kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.854574 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.854949 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.875716 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nj489\" (UniqueName: \"kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489\") pod \"community-operators-h5zs8\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:41 crc kubenswrapper[4953]: I1011 03:19:41.996981 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:42 crc kubenswrapper[4953]: I1011 03:19:42.558400 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:42 crc kubenswrapper[4953]: I1011 03:19:42.756559 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerStarted","Data":"73fc6bc7bbbb478f94c2324c071f4fce2972334350474e8751e5c5b39c6fe101"} Oct 11 03:19:43 crc kubenswrapper[4953]: I1011 03:19:43.770501 4953 generic.go:334] "Generic (PLEG): container finished" podID="29c6731b-d393-485c-9221-650d674e403e" containerID="5c94b3bc5f3685df28719c40756682616b36cf728d83ba21a15e34bd4c03e343" exitCode=0 Oct 11 03:19:43 crc kubenswrapper[4953]: I1011 03:19:43.770571 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerDied","Data":"5c94b3bc5f3685df28719c40756682616b36cf728d83ba21a15e34bd4c03e343"} Oct 11 03:19:43 crc kubenswrapper[4953]: I1011 03:19:43.777073 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:19:44 crc kubenswrapper[4953]: I1011 03:19:44.779894 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerStarted","Data":"cdcba837a9d743950743e25e3a05ee67431f597cbd27d71f88057b1bdaaa0c3e"} Oct 11 03:19:45 crc kubenswrapper[4953]: I1011 03:19:45.798591 4953 generic.go:334] "Generic (PLEG): container finished" podID="29c6731b-d393-485c-9221-650d674e403e" containerID="cdcba837a9d743950743e25e3a05ee67431f597cbd27d71f88057b1bdaaa0c3e" exitCode=0 Oct 11 03:19:45 crc kubenswrapper[4953]: I1011 03:19:45.821322 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerDied","Data":"cdcba837a9d743950743e25e3a05ee67431f597cbd27d71f88057b1bdaaa0c3e"} Oct 11 03:19:46 crc kubenswrapper[4953]: I1011 03:19:46.814430 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerStarted","Data":"c0256f2a76964c6d88f26dde8103a51164ce6d301a7f8892a7a48bb2316d7625"} Oct 11 03:19:46 crc kubenswrapper[4953]: I1011 03:19:46.839622 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h5zs8" podStartSLOduration=3.255862192 podStartE2EDuration="5.839584528s" podCreationTimestamp="2025-10-11 03:19:41 +0000 UTC" firstStartedPulling="2025-10-11 03:19:43.776671138 +0000 UTC m=+1994.709758792" lastFinishedPulling="2025-10-11 03:19:46.360393484 +0000 UTC m=+1997.293481128" observedRunningTime="2025-10-11 03:19:46.833870043 +0000 UTC m=+1997.766957727" watchObservedRunningTime="2025-10-11 
03:19:46.839584528 +0000 UTC m=+1997.772672182" Oct 11 03:19:51 crc kubenswrapper[4953]: I1011 03:19:51.997671 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:52 crc kubenswrapper[4953]: I1011 03:19:52.000782 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:52 crc kubenswrapper[4953]: I1011 03:19:52.090086 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:52 crc kubenswrapper[4953]: I1011 03:19:52.953268 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:53 crc kubenswrapper[4953]: I1011 03:19:53.012117 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:54 crc kubenswrapper[4953]: I1011 03:19:54.913566 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h5zs8" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="registry-server" containerID="cri-o://c0256f2a76964c6d88f26dde8103a51164ce6d301a7f8892a7a48bb2316d7625" gracePeriod=2 Oct 11 03:19:55 crc kubenswrapper[4953]: I1011 03:19:55.926751 4953 generic.go:334] "Generic (PLEG): container finished" podID="29c6731b-d393-485c-9221-650d674e403e" containerID="c0256f2a76964c6d88f26dde8103a51164ce6d301a7f8892a7a48bb2316d7625" exitCode=0 Oct 11 03:19:55 crc kubenswrapper[4953]: I1011 03:19:55.926836 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerDied","Data":"c0256f2a76964c6d88f26dde8103a51164ce6d301a7f8892a7a48bb2316d7625"} Oct 11 03:19:55 crc kubenswrapper[4953]: I1011 03:19:55.927276 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5zs8" event={"ID":"29c6731b-d393-485c-9221-650d674e403e","Type":"ContainerDied","Data":"73fc6bc7bbbb478f94c2324c071f4fce2972334350474e8751e5c5b39c6fe101"} Oct 11 03:19:55 crc kubenswrapper[4953]: I1011 03:19:55.927295 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73fc6bc7bbbb478f94c2324c071f4fce2972334350474e8751e5c5b39c6fe101" Oct 11 03:19:55 crc kubenswrapper[4953]: I1011 03:19:55.946036 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.070127 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj489\" (UniqueName: \"kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489\") pod \"29c6731b-d393-485c-9221-650d674e403e\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.070338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities\") pod \"29c6731b-d393-485c-9221-650d674e403e\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.070408 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content\") pod \"29c6731b-d393-485c-9221-650d674e403e\" (UID: \"29c6731b-d393-485c-9221-650d674e403e\") " Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.071626 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities" (OuterVolumeSpecName: "utilities") pod "29c6731b-d393-485c-9221-650d674e403e" (UID: "29c6731b-d393-485c-9221-650d674e403e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.076723 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489" (OuterVolumeSpecName: "kube-api-access-nj489") pod "29c6731b-d393-485c-9221-650d674e403e" (UID: "29c6731b-d393-485c-9221-650d674e403e"). InnerVolumeSpecName "kube-api-access-nj489". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.140844 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29c6731b-d393-485c-9221-650d674e403e" (UID: "29c6731b-d393-485c-9221-650d674e403e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.172927 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj489\" (UniqueName: \"kubernetes.io/projected/29c6731b-d393-485c-9221-650d674e403e-kube-api-access-nj489\") on node \"crc\" DevicePath \"\"" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.173264 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.173332 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c6731b-d393-485c-9221-650d674e403e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.938744 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h5zs8" Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.988653 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:56 crc kubenswrapper[4953]: I1011 03:19:56.995489 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h5zs8"] Oct 11 03:19:57 crc kubenswrapper[4953]: I1011 03:19:57.820960 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c6731b-d393-485c-9221-650d674e403e" path="/var/lib/kubelet/pods/29c6731b-d393-485c-9221-650d674e403e/volumes" Oct 11 03:20:11 crc kubenswrapper[4953]: I1011 03:20:11.316691 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:20:11 crc kubenswrapper[4953]: I1011 03:20:11.317553 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.316582 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.317272 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.317333 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.322680 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.322826 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31" gracePeriod=600 Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.499074 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31" exitCode=0 Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.499366 4953 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31"} Oct 11 03:20:41 crc kubenswrapper[4953]: I1011 03:20:41.499539 4953 scope.go:117] "RemoveContainer" containerID="e76b6b68492e00af798d11d91503ace56b88ec4a28a0cd741069ab9e1e1ab39b" Oct 11 03:20:42 crc kubenswrapper[4953]: I1011 03:20:42.511830 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"} Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.906456 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:15 crc kubenswrapper[4953]: E1011 03:21:15.907434 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="extract-content" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.907447 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="extract-content" Oct 11 03:21:15 crc kubenswrapper[4953]: E1011 03:21:15.907461 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="registry-server" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.907466 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="registry-server" Oct 11 03:21:15 crc kubenswrapper[4953]: E1011 03:21:15.907496 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="extract-utilities" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.907502 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="extract-utilities" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.907679 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c6731b-d393-485c-9221-650d674e403e" containerName="registry-server" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.909160 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:15 crc kubenswrapper[4953]: I1011 03:21:15.947715 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.103057 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.103130 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.103228 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7zk6\" (UniqueName: \"kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.204795 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7zk6\" (UniqueName: \"kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.204916 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.204958 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.205384 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.205900 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.244470 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h7zk6\" (UniqueName: \"kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6\") pod \"certified-operators-rgbnx\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:16 crc kubenswrapper[4953]: I1011 03:21:16.540816 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:17 crc kubenswrapper[4953]: I1011 03:21:17.054254 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:17 crc kubenswrapper[4953]: I1011 03:21:17.913680 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerDied","Data":"bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742"} Oct 11 03:21:17 crc kubenswrapper[4953]: I1011 03:21:17.921261 4953 generic.go:334] "Generic (PLEG): container finished" podID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerID="bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742" exitCode=0 Oct 11 03:21:17 crc kubenswrapper[4953]: I1011 03:21:17.921350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerStarted","Data":"be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee"} Oct 11 03:21:18 crc kubenswrapper[4953]: I1011 03:21:18.932790 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerStarted","Data":"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6"} Oct 11 03:21:19 crc kubenswrapper[4953]: I1011 03:21:19.942749 4953 generic.go:334] "Generic (PLEG): container finished" podID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerID="c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6" exitCode=0 Oct 11 03:21:19 crc kubenswrapper[4953]: I1011 03:21:19.942866 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerDied","Data":"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6"} Oct 11 03:21:20 crc kubenswrapper[4953]: I1011 03:21:20.959528 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerStarted","Data":"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d"} Oct 11 03:21:20 crc kubenswrapper[4953]: I1011 03:21:20.996295 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rgbnx" podStartSLOduration=3.454720803 podStartE2EDuration="5.996248297s" podCreationTimestamp="2025-10-11 03:21:15 +0000 UTC" firstStartedPulling="2025-10-11 03:21:17.916864287 +0000 UTC m=+2088.849951941" lastFinishedPulling="2025-10-11 03:21:20.458391781 +0000 UTC m=+2091.391479435" observedRunningTime="2025-10-11 03:21:20.981853871 +0000 UTC m=+2091.914941555" watchObservedRunningTime="2025-10-11 03:21:20.996248297 +0000 UTC m=+2091.929335991" Oct 11 03:21:26 crc kubenswrapper[4953]: I1011 03:21:26.541560 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:26 crc kubenswrapper[4953]: I1011 03:21:26.542425 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:26 crc kubenswrapper[4953]: I1011 03:21:26.628744 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:27 crc kubenswrapper[4953]: I1011 03:21:27.106517 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:27 crc kubenswrapper[4953]: I1011 03:21:27.208627 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.051857 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rgbnx" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="registry-server" containerID="cri-o://d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d" gracePeriod=2 Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.565456 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.712945 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7zk6\" (UniqueName: \"kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6\") pod \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.713145 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content\") pod \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.713208 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities\") pod \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\" (UID: \"98f0e3b0-e43d-4866-ba7f-32501d95d42f\") " Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.714106 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities" (OuterVolumeSpecName: "utilities") pod "98f0e3b0-e43d-4866-ba7f-32501d95d42f" (UID: "98f0e3b0-e43d-4866-ba7f-32501d95d42f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.721848 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6" (OuterVolumeSpecName: "kube-api-access-h7zk6") pod "98f0e3b0-e43d-4866-ba7f-32501d95d42f" (UID: "98f0e3b0-e43d-4866-ba7f-32501d95d42f"). InnerVolumeSpecName "kube-api-access-h7zk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.755189 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98f0e3b0-e43d-4866-ba7f-32501d95d42f" (UID: "98f0e3b0-e43d-4866-ba7f-32501d95d42f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.815884 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.816218 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98f0e3b0-e43d-4866-ba7f-32501d95d42f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:21:29 crc kubenswrapper[4953]: I1011 03:21:29.816349 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7zk6\" (UniqueName: \"kubernetes.io/projected/98f0e3b0-e43d-4866-ba7f-32501d95d42f-kube-api-access-h7zk6\") on node \"crc\" DevicePath \"\"" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.060179 4953 generic.go:334] "Generic (PLEG): container finished" podID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerID="d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d" exitCode=0 Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.060243 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rgbnx" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.060265 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerDied","Data":"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d"} Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.060906 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rgbnx" event={"ID":"98f0e3b0-e43d-4866-ba7f-32501d95d42f","Type":"ContainerDied","Data":"be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee"} Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.060930 4953 scope.go:117] "RemoveContainer" containerID="d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.082807 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.089724 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rgbnx"] Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.095138 4953 scope.go:117] "RemoveContainer" containerID="c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.119374 4953 scope.go:117] "RemoveContainer" containerID="bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.182315 4953 scope.go:117] "RemoveContainer" containerID="d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d" Oct 11 03:21:30 crc kubenswrapper[4953]: E1011 03:21:30.182730 4953 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d\": container with ID starting with d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d not found: ID does not exist" containerID="d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.182758 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d"} err="failed to get container status \"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d\": rpc error: code = NotFound desc = could not find container \"d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d\": container with ID starting with d697126bbe22b335d87554138a724a399dd910e5f77224dc505773bb7a693f4d not found: ID does not exist" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.182781 4953 scope.go:117] "RemoveContainer" containerID="c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6" Oct 11 03:21:30 crc kubenswrapper[4953]: E1011 03:21:30.183032 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6\": container with ID starting with c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6 not found: ID does not exist" containerID="c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.183050 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6"} err="failed to get container status \"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6\": rpc error: code = NotFound desc = could not find container \"c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6\": container with ID starting with c68db2c748000e213135f9a3783c9b72b1136034396c976be86bd9c1dac682e6 not found: ID does not exist" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.183064 4953 scope.go:117] "RemoveContainer" containerID="bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742" Oct 11 03:21:30 crc kubenswrapper[4953]: E1011 03:21:30.183283 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742\": container with ID starting with bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742 not found: ID does not exist" containerID="bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742" Oct 11 03:21:30 crc kubenswrapper[4953]: I1011 03:21:30.183305 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742"} err="failed to get container status \"bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742\": rpc error: code = NotFound desc = could not find container \"bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742\": container with ID starting with bbfff214dc9612d33e23fcd18d3a24584930c218710f1e7b91f85d21d5658742 not found: ID does not exist" Oct 11 03:21:31 crc kubenswrapper[4953]: I1011 03:21:31.817699 4953 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" path="/var/lib/kubelet/pods/98f0e3b0-e43d-4866-ba7f-32501d95d42f/volumes" Oct 11 03:21:34 crc kubenswrapper[4953]: E1011 03:21:34.073499 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache]" Oct 11 03:21:44 crc kubenswrapper[4953]: E1011 03:21:44.310543 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache]" Oct 11 03:21:54 crc kubenswrapper[4953]: E1011 03:21:54.593204 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache]" Oct 11 03:22:04 crc kubenswrapper[4953]: E1011 03:22:04.928582 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in memory cache]" Oct 11 03:22:15 crc kubenswrapper[4953]: E1011 03:22:15.169106 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache]" Oct 11 03:22:25 crc kubenswrapper[4953]: E1011 03:22:25.414310 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice/crio-be9711351510858467ed8ceff5dc62be92688afb5196282f83a1e2b527acc8ee\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod98f0e3b0_e43d_4866_ba7f_32501d95d42f.slice\": RecentStats: unable to find data in 
memory cache]" Oct 11 03:22:29 crc kubenswrapper[4953]: E1011 03:22:29.836742 4953 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/10d91049024bbcc5f8d88ddd989b06cdbee6a8d59cbce3a1c27f0029f88a72a3/diff" to get inode usage: stat /var/lib/containers/storage/overlay/10d91049024bbcc5f8d88ddd989b06cdbee6a8d59cbce3a1c27f0029f88a72a3/diff: no such file or directory, extraDiskErr: Oct 11 03:22:41 crc kubenswrapper[4953]: I1011 03:22:41.316626 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:22:41 crc kubenswrapper[4953]: I1011 03:22:41.317191 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.346253 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4s56g"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.363186 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.369805 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.377342 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-4s56g"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.386005 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.392798 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-gpvm9"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.400335 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.406291 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.413136 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.419375 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c7btq"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.425371 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-ng5mf"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.433639 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.442655 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4wtcc"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.449303 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.455888 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.485924 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.492951 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-wrz56"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.501202 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-tnl5m"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.509046 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pwb64"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.516877 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nh68x"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.524384 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-qtjzx"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.532021 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-w5cvs"] Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.805157 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a75b6ba-a5d4-4153-b7c4-95673fd9ce31" path="/var/lib/kubelet/pods/1a75b6ba-a5d4-4153-b7c4-95673fd9ce31/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.805702 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2598cf72-6bcd-4d4d-a34d-b157fab230fd" path="/var/lib/kubelet/pods/2598cf72-6bcd-4d4d-a34d-b157fab230fd/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.806240 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f235983-7690-4d9b-89a7-6f9d873b80b4" path="/var/lib/kubelet/pods/2f235983-7690-4d9b-89a7-6f9d873b80b4/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.806924 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53512175-e6aa-4d31-aa79-0d0c651a61dd" path="/var/lib/kubelet/pods/53512175-e6aa-4d31-aa79-0d0c651a61dd/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.808946 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6" path="/var/lib/kubelet/pods/7aa439c4-bdfa-4fa6-b69d-cef4c35f61f6/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.809705 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8046051a-f405-4a56-95dd-a89214b321f1" path="/var/lib/kubelet/pods/8046051a-f405-4a56-95dd-a89214b321f1/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.810220 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bf05143-ee92-4be9-a6a7-0138e0e621c5" path="/var/lib/kubelet/pods/9bf05143-ee92-4be9-a6a7-0138e0e621c5/volumes" Oct 11 03:22:59 crc 
kubenswrapper[4953]: I1011 03:22:59.811184 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3" path="/var/lib/kubelet/pods/9dffdf6e-b5a7-4249-804c-e65e8f4c5ad3/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.811752 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa7542dd-a07c-4d37-bba5-4b72a648d586" path="/var/lib/kubelet/pods/aa7542dd-a07c-4d37-bba5-4b72a648d586/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.812289 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5d756a9-af2d-483a-9f6b-97974302220a" path="/var/lib/kubelet/pods/b5d756a9-af2d-483a-9f6b-97974302220a/volumes" Oct 11 03:22:59 crc kubenswrapper[4953]: I1011 03:22:59.813275 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baae5cb7-14c0-4367-ab6f-ecb4a323c6c3" path="/var/lib/kubelet/pods/baae5cb7-14c0-4367-ab6f-ecb4a323c6c3/volumes" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.114816 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"] Oct 11 03:23:05 crc kubenswrapper[4953]: E1011 03:23:05.116238 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="extract-utilities" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.116271 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="extract-utilities" Oct 11 03:23:05 crc kubenswrapper[4953]: E1011 03:23:05.116311 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="extract-content" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.116331 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="extract-content" Oct 11 03:23:05 crc kubenswrapper[4953]: E1011 03:23:05.116379 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="registry-server" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.116399 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="registry-server" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.117128 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f0e3b0-e43d-4866-ba7f-32501d95d42f" containerName="registry-server" Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.118212 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.121214 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.121214 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.121214 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.123018 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.124814 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.139472 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"]
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.190966 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.191146 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.191283 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xknd2\" (UniqueName: \"kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.191313 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.191491 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.293858 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.293929 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.293966 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xknd2\" (UniqueName: \"kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.293990 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.294041 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.299681 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.299719 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.300092 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.309015 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.309302 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xknd2\" (UniqueName: \"kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:05 crc kubenswrapper[4953]: I1011 03:23:05.465774 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:06 crc kubenswrapper[4953]: I1011 03:23:06.045321 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"]
Oct 11 03:23:06 crc kubenswrapper[4953]: I1011 03:23:06.097519 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5" event={"ID":"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca","Type":"ContainerStarted","Data":"6a5f29a2e56fdfa18bfdccdddb6fa12e6a106d5a74979e73f858cc0e3cda0fff"}
Oct 11 03:23:07 crc kubenswrapper[4953]: I1011 03:23:07.105412 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5" event={"ID":"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca","Type":"ContainerStarted","Data":"4eb5e078e00be166e71b57af86f422009fa8f6f5eaf3b843bbe80b11a0bc93fd"}
Oct 11 03:23:07 crc kubenswrapper[4953]: I1011 03:23:07.142084 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5" podStartSLOduration=1.471130211 podStartE2EDuration="2.142052741s" podCreationTimestamp="2025-10-11 03:23:05 +0000 UTC" firstStartedPulling="2025-10-11 03:23:06.048747921 +0000 UTC m=+2196.981835565" lastFinishedPulling="2025-10-11 03:23:06.719670421 +0000 UTC m=+2197.652758095" observedRunningTime="2025-10-11 03:23:07.125314555 +0000 UTC m=+2198.058402199" watchObservedRunningTime="2025-10-11 03:23:07.142052741 +0000 UTC m=+2198.075140425"
Oct 11 03:23:11 crc kubenswrapper[4953]: I1011 03:23:11.317152 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 03:23:11 crc kubenswrapper[4953]: I1011 03:23:11.317549 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 03:23:19 crc kubenswrapper[4953]: I1011 03:23:19.230627 4953 generic.go:334] "Generic (PLEG): container finished" podID="7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" containerID="4eb5e078e00be166e71b57af86f422009fa8f6f5eaf3b843bbe80b11a0bc93fd" exitCode=0
Oct 11 03:23:19 crc kubenswrapper[4953]: I1011 03:23:19.230652 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5" event={"ID":"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca","Type":"ContainerDied","Data":"4eb5e078e00be166e71b57af86f422009fa8f6f5eaf3b843bbe80b11a0bc93fd"}
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.687107 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.725116 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key\") pod \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") "
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.725295 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle\") pod \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") "
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.725372 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph\") pod \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") "
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.725426 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xknd2\" (UniqueName: \"kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2\") pod \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") "
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.725542 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory\") pod \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\" (UID: \"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca\") "
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.732020 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2" (OuterVolumeSpecName: "kube-api-access-xknd2") pod "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" (UID: "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca"). InnerVolumeSpecName "kube-api-access-xknd2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.732312 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph" (OuterVolumeSpecName: "ceph") pod "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" (UID: "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.732716 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" (UID: "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.760294 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory" (OuterVolumeSpecName: "inventory") pod "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" (UID: "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.771197 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" (UID: "7c3615eb-2efa-4cbc-9eb0-c207f6d322ca"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.828384 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ceph\") on node \"crc\" DevicePath \"\""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.828772 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xknd2\" (UniqueName: \"kubernetes.io/projected/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-kube-api-access-xknd2\") on node \"crc\" DevicePath \"\""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.828786 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.828799 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 03:23:20 crc kubenswrapper[4953]: I1011 03:23:20.828811 4953 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3615eb-2efa-4cbc-9eb0-c207f6d322ca-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.264640 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5" event={"ID":"7c3615eb-2efa-4cbc-9eb0-c207f6d322ca","Type":"ContainerDied","Data":"6a5f29a2e56fdfa18bfdccdddb6fa12e6a106d5a74979e73f858cc0e3cda0fff"}
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.264697 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a5f29a2e56fdfa18bfdccdddb6fa12e6a106d5a74979e73f858cc0e3cda0fff"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.264772 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.372366 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"]
Oct 11 03:23:21 crc kubenswrapper[4953]: E1011 03:23:21.372974 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.373000 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.373213 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c3615eb-2efa-4cbc-9eb0-c207f6d322ca" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.374718 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.381937 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.381978 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.382367 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.382721 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.391483 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"]
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.392701 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.446026 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.446191 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.446228 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.446271 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqz97\" (UniqueName: \"kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.446301 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.547818 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.547897 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.547923 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqz97\" (UniqueName: \"kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.547973 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.548016 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.551796 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.552249 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.553092 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.553519 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.564916 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqz97\" (UniqueName: \"kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:21 crc kubenswrapper[4953]: I1011 03:23:21.718424 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:23:22 crc kubenswrapper[4953]: I1011 03:23:22.312419 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"]
Oct 11 03:23:23 crc kubenswrapper[4953]: I1011 03:23:23.284253 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s" event={"ID":"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd","Type":"ContainerStarted","Data":"03e4c2e3eea482c3bca0894cb87c27e51b7f1923745c9d5321e2bb98f204a05c"}
Oct 11 03:23:23 crc kubenswrapper[4953]: I1011 03:23:23.284994 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s" event={"ID":"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd","Type":"ContainerStarted","Data":"e3a0c2278728810ed3c72f08d4f830ac1bbe379b54be6f90a80aa7d92ad60e38"}
Oct 11 03:23:23 crc kubenswrapper[4953]: I1011 03:23:23.308212 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s" podStartSLOduration=1.8199694260000001 podStartE2EDuration="2.308189049s" podCreationTimestamp="2025-10-11 03:23:21 +0000 UTC" firstStartedPulling="2025-10-11 03:23:22.322085807 +0000 UTC m=+2213.255173451" lastFinishedPulling="2025-10-11 03:23:22.81030543 +0000 UTC m=+2213.743393074" observedRunningTime="2025-10-11 03:23:23.3026863 +0000 UTC m=+2214.235773964" watchObservedRunningTime="2025-10-11 03:23:23.308189049 +0000 UTC m=+2214.241276693"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.316804 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.317412 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.317454 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.318163 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.318209 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" gracePeriod=600
Oct 11 03:23:41 crc kubenswrapper[4953]: E1011 03:23:41.443676 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.458490 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" exitCode=0
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.458543 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"}
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.458586 4953 scope.go:117] "RemoveContainer" containerID="822960e558e9bbd8c59cb80cc0b26e882a882ee26095ad53abee11e91f461b31"
Oct 11 03:23:41 crc kubenswrapper[4953]: I1011 03:23:41.459363 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:23:41 crc kubenswrapper[4953]: E1011 03:23:41.459784 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.005323 4953 scope.go:117] "RemoveContainer" containerID="51547b20134e6ad5d7fdc673e28d551a89377b8758d42b2fe5b30461863a501e"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.034164 4953 scope.go:117] "RemoveContainer" containerID="534df06864b09e04009755ea8df43728c31ccade5483ea4d7df96caed9b59fc7"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.075888 4953 scope.go:117] "RemoveContainer" containerID="4902b756d16a2d757f23db0d305dd72b3b77d24818e58b3e74e50e99b00d2dc3"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.115860 4953 scope.go:117] "RemoveContainer" containerID="d19d84b3d7e5326e5e61b4acb322d65ef87c1c907940b0c5815849fb00fa03f3"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.150405 4953 scope.go:117] "RemoveContainer" containerID="cec342b479262ba7961555f0901452632cf6452ee3f1f2529f092edc78bfa3a6"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.199914 4953 scope.go:117] "RemoveContainer" containerID="8aaa30bd0a2578dc1fb82afcd47537ba3ce1fc69273bccb8ea6f5ee456c7a3e3"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.226015 4953 scope.go:117] "RemoveContainer" containerID="54c6f8773b2c35522c9e9e1b1cb22430d96834e0b846a429c53d3d7817d43036"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.269313 4953 scope.go:117] "RemoveContainer" containerID="c70d04fc4d6012d439ef661c72ac8aaf720d7beddb3451f1a50d15565e7942b4"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.313863 4953 scope.go:117] "RemoveContainer" containerID="be70a08430be37c69d86203e14670da797433ca61110a5b71e06d8c57c9961a9"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.376267 4953 scope.go:117] "RemoveContainer" containerID="e643b7e79ff51a390869993e1ae2fc62c6bbd5768ffb5a949291991419d24752"
Oct 11 03:23:49 crc kubenswrapper[4953]: I1011 03:23:49.447938 4953 scope.go:117] "RemoveContainer" containerID="d62492716b41f187b722cbfb7ad010e8581881175b6634238f35a5d33a4127ab"
Oct 11 03:23:52 crc kubenswrapper[4953]: I1011 03:23:52.796257 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:23:52 crc kubenswrapper[4953]: E1011 03:23:52.797395 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:24:05 crc kubenswrapper[4953]: I1011 03:24:05.796100 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:24:05 crc kubenswrapper[4953]: E1011 03:24:05.796934 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:24:20 crc kubenswrapper[4953]: I1011 03:24:20.795452 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:24:20 crc kubenswrapper[4953]: E1011 03:24:20.796573 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:24:32 crc kubenswrapper[4953]: I1011 03:24:32.796045 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:24:32 crc kubenswrapper[4953]: E1011 03:24:32.797059 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:24:47 crc kubenswrapper[4953]: I1011 03:24:47.796031 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:24:47 crc kubenswrapper[4953]: E1011 03:24:47.797382 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:24:56 crc kubenswrapper[4953]: I1011 03:24:56.196517 4953 generic.go:334] "Generic (PLEG): container finished" podID="c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" containerID="03e4c2e3eea482c3bca0894cb87c27e51b7f1923745c9d5321e2bb98f204a05c" exitCode=0
Oct 11 03:24:56 crc kubenswrapper[4953]: I1011 03:24:56.196639 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s" event={"ID":"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd","Type":"ContainerDied","Data":"03e4c2e3eea482c3bca0894cb87c27e51b7f1923745c9d5321e2bb98f204a05c"}
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.585730 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.649049 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle\") pod \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") "
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.649109 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key\") pod \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") "
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.649133 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqz97\" (UniqueName: \"kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97\") pod \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") "
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.649206 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory\") pod \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") "
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.649309 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph\") pod \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\" (UID: \"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd\") "
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.655927 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph" (OuterVolumeSpecName: "ceph") pod "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" (UID: "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.656326 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" (UID: "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.657885 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97" (OuterVolumeSpecName: "kube-api-access-mqz97") pod "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" (UID: "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd"). InnerVolumeSpecName "kube-api-access-mqz97". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.682634 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" (UID: "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.700913 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory" (OuterVolumeSpecName: "inventory") pod "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" (UID: "c4fa87fc-5064-4fd7-93d7-08ee6d0428bd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.753509 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.753550 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ceph\") on node \"crc\" DevicePath \"\""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.753620 4953 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.753633 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 03:24:57 crc kubenswrapper[4953]: I1011 03:24:57.753646 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqz97\" (UniqueName: \"kubernetes.io/projected/c4fa87fc-5064-4fd7-93d7-08ee6d0428bd-kube-api-access-mqz97\") on node \"crc\" DevicePath \"\""
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.219815 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s" event={"ID":"c4fa87fc-5064-4fd7-93d7-08ee6d0428bd","Type":"ContainerDied","Data":"e3a0c2278728810ed3c72f08d4f830ac1bbe379b54be6f90a80aa7d92ad60e38"}
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.219872 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.219883 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3a0c2278728810ed3c72f08d4f830ac1bbe379b54be6f90a80aa7d92ad60e38"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.345164 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"]
Oct 11 03:24:58 crc kubenswrapper[4953]: E1011 03:24:58.346889 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.347076 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.347592 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4fa87fc-5064-4fd7-93d7-08ee6d0428bd" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.349025 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.351931 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.353257 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.353354 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.353522 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.354773 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.360869 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"]
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.364734 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.365128 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.365293 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w62lh\" (UniqueName: \"kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.365455 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.467988 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.468046 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w62lh\" (UniqueName: \"kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.468078 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.468177 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.472473 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.473252 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.473311 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.491483 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w62lh\" (UniqueName: \"kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:58 crc kubenswrapper[4953]: I1011 03:24:58.678030 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:24:59 crc kubenswrapper[4953]: I1011 03:24:59.051493 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"]
Oct 11 03:24:59 crc kubenswrapper[4953]: I1011 03:24:59.056339 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 03:24:59 crc kubenswrapper[4953]: I1011 03:24:59.231050 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2" event={"ID":"2329ccc0-b566-4f3e-a67a-3fc4c5df824a","Type":"ContainerStarted","Data":"53c96ab688471865fdd18e3e7a3c745b8ff4054f667b7e4fd92d932013cb8314"}
Oct 11 03:25:00 crc kubenswrapper[4953]: I1011 03:25:00.247793 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2" event={"ID":"2329ccc0-b566-4f3e-a67a-3fc4c5df824a","Type":"ContainerStarted","Data":"3cee364ac47b6c812a8c5f07eb5ec213a4dae569895d2fd95eb57a96e99ea3ba"}
Oct 11 03:25:00 crc kubenswrapper[4953]: I1011 03:25:00.313135 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2" podStartSLOduration=1.876363661 podStartE2EDuration="2.313097574s" podCreationTimestamp="2025-10-11 03:24:58 +0000 UTC" firstStartedPulling="2025-10-11 03:24:59.056005899 +0000 UTC m=+2309.989093543" lastFinishedPulling="2025-10-11 03:24:59.492739802 +0000 UTC m=+2310.425827456" observedRunningTime="2025-10-11 03:25:00.272423305 +0000 UTC m=+2311.205510989" watchObservedRunningTime="2025-10-11 03:25:00.313097574 +0000 UTC m=+2311.246185258"
Oct 11 03:25:01 crc kubenswrapper[4953]: I1011 03:25:01.798661 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:25:01 crc kubenswrapper[4953]: E1011 03:25:01.799101 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:25:12 crc kubenswrapper[4953]: I1011 03:25:12.797912 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:25:12 crc kubenswrapper[4953]: E1011 03:25:12.798921 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.652906 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"]
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.660594 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.683302 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"]
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.841162 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.841303 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6d7r\" (UniqueName: \"kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.841400 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.943515 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.943633 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6d7r\" (UniqueName: \"kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.943727 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.944185 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.944223 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.965312 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6d7r\" (UniqueName: \"kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r\") pod \"redhat-marketplace-mlnmt\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:23 crc kubenswrapper[4953]: I1011 03:25:23.994504 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:24 crc kubenswrapper[4953]: I1011 03:25:24.443818 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"]
Oct 11 03:25:24 crc kubenswrapper[4953]: I1011 03:25:24.530361 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerStarted","Data":"7e72b5b89f9f7d55360244dde286002cd28c2bebb8a375bdfeaaaaca68ac7dff"}
Oct 11 03:25:25 crc kubenswrapper[4953]: I1011 03:25:25.542140 4953 generic.go:334] "Generic (PLEG): container finished" podID="2329ccc0-b566-4f3e-a67a-3fc4c5df824a" containerID="3cee364ac47b6c812a8c5f07eb5ec213a4dae569895d2fd95eb57a96e99ea3ba" exitCode=0
Oct 11 03:25:25 crc kubenswrapper[4953]: I1011 03:25:25.542241 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2" event={"ID":"2329ccc0-b566-4f3e-a67a-3fc4c5df824a","Type":"ContainerDied","Data":"3cee364ac47b6c812a8c5f07eb5ec213a4dae569895d2fd95eb57a96e99ea3ba"}
Oct 11 03:25:25 crc kubenswrapper[4953]: I1011 03:25:25.543878 4953 generic.go:334] "Generic (PLEG): container finished" podID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerID="175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81" exitCode=0
Oct 11 03:25:25 crc kubenswrapper[4953]: I1011 03:25:25.543924 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerDied","Data":"175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81"}
Oct 11 03:25:25 crc kubenswrapper[4953]: I1011 03:25:25.795337 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4"
Oct 11 03:25:25 crc kubenswrapper[4953]: E1011 03:25:25.795761 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.034591 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.108447 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key\") pod \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") "
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.108547 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w62lh\" (UniqueName: \"kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh\") pod \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") "
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.108626 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory\") pod \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") "
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.108674 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph\") pod \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\" (UID: \"2329ccc0-b566-4f3e-a67a-3fc4c5df824a\") "
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.117144 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh" (OuterVolumeSpecName: "kube-api-access-w62lh") pod "2329ccc0-b566-4f3e-a67a-3fc4c5df824a" (UID: "2329ccc0-b566-4f3e-a67a-3fc4c5df824a"). InnerVolumeSpecName "kube-api-access-w62lh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.117596 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph" (OuterVolumeSpecName: "ceph") pod "2329ccc0-b566-4f3e-a67a-3fc4c5df824a" (UID: "2329ccc0-b566-4f3e-a67a-3fc4c5df824a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.139095 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2329ccc0-b566-4f3e-a67a-3fc4c5df824a" (UID: "2329ccc0-b566-4f3e-a67a-3fc4c5df824a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.150488 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory" (OuterVolumeSpecName: "inventory") pod "2329ccc0-b566-4f3e-a67a-3fc4c5df824a" (UID: "2329ccc0-b566-4f3e-a67a-3fc4c5df824a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.210996 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ceph\") on node \"crc\" DevicePath \"\""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.211040 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.211057 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w62lh\" (UniqueName: \"kubernetes.io/projected/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-kube-api-access-w62lh\") on node \"crc\" DevicePath \"\""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.211069 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2329ccc0-b566-4f3e-a67a-3fc4c5df824a-inventory\") on node \"crc\" DevicePath \"\""
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.566981 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2" event={"ID":"2329ccc0-b566-4f3e-a67a-3fc4c5df824a","Type":"ContainerDied","Data":"53c96ab688471865fdd18e3e7a3c745b8ff4054f667b7e4fd92d932013cb8314"}
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.567534 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53c96ab688471865fdd18e3e7a3c745b8ff4054f667b7e4fd92d932013cb8314"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.567028 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.569561 4953 generic.go:334] "Generic (PLEG): container finished" podID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerID="2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276" exitCode=0
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.569651 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerDied","Data":"2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276"}
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.658360 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"]
Oct 11 03:25:27 crc kubenswrapper[4953]: E1011 03:25:27.658841 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2329ccc0-b566-4f3e-a67a-3fc4c5df824a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.658864 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2329ccc0-b566-4f3e-a67a-3fc4c5df824a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.659057 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2329ccc0-b566-4f3e-a67a-3fc4c5df824a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.659695 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.664038 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.664106 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.664104 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.664038 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.664250 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.668134 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"]
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.722371 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.722783 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmfrc\" (UniqueName: \"kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.722900 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.723042 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.824757 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmfrc\" (UniqueName: \"kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"
Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.825094 4953
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.825260 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.825449 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.828825 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.828879 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.829371 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.851390 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmfrc\" (UniqueName: \"kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:27 crc kubenswrapper[4953]: I1011 03:25:27.993435 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:28 crc kubenswrapper[4953]: I1011 03:25:28.545487 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw"] Oct 11 03:25:28 crc kubenswrapper[4953]: W1011 03:25:28.546656 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf89bcbf_9d91_4ce7_8919_9b0c47b5d498.slice/crio-7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56 WatchSource:0}: Error finding container 7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56: Status 404 returned error can't find the container with id 7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56 Oct 11 03:25:28 crc kubenswrapper[4953]: I1011 03:25:28.592478 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerStarted","Data":"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d"} Oct 11 03:25:28 crc kubenswrapper[4953]: I1011 03:25:28.597205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" event={"ID":"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498","Type":"ContainerStarted","Data":"7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56"} Oct 11 03:25:28 crc kubenswrapper[4953]: I1011 03:25:28.616480 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mlnmt" podStartSLOduration=3.066558356 podStartE2EDuration="5.616447567s" podCreationTimestamp="2025-10-11 03:25:23 +0000 UTC" firstStartedPulling="2025-10-11 03:25:25.545988113 +0000 UTC m=+2336.479075777" lastFinishedPulling="2025-10-11 03:25:28.095877344 +0000 UTC m=+2339.028964988" observedRunningTime="2025-10-11 03:25:28.614903768 +0000 UTC m=+2339.547991422" watchObservedRunningTime="2025-10-11 03:25:28.616447567 +0000 UTC m=+2339.549535211" Oct 11 03:25:29 crc kubenswrapper[4953]: I1011 03:25:29.609860 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" event={"ID":"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498","Type":"ContainerStarted","Data":"c4d64a0fa00e13cc5cb9b8f442029fbb66faf60171aad61215c7ff4c7f5e432a"} Oct 11 03:25:29 crc kubenswrapper[4953]: I1011 03:25:29.638568 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" podStartSLOduration=2.023851608 podStartE2EDuration="2.63854148s" podCreationTimestamp="2025-10-11 03:25:27 +0000 UTC" firstStartedPulling="2025-10-11 03:25:28.551297049 +0000 UTC m=+2339.484384693" lastFinishedPulling="2025-10-11 03:25:29.165986911 +0000 UTC m=+2340.099074565" observedRunningTime="2025-10-11 03:25:29.635696388 +0000 UTC m=+2340.568784092" watchObservedRunningTime="2025-10-11 03:25:29.63854148 +0000 UTC m=+2340.571629124" Oct 11 03:25:33 crc kubenswrapper[4953]: I1011 03:25:33.995248 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mlnmt" Oct 11 03:25:33 crc kubenswrapper[4953]: I1011 03:25:33.995938 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mlnmt" Oct 11 03:25:34 crc kubenswrapper[4953]: 
Oct 11 03:25:34 crc kubenswrapper[4953]: I1011 03:25:34.039565 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:34 crc kubenswrapper[4953]: I1011 03:25:34.731546 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mlnmt"
Oct 11 03:25:34 crc kubenswrapper[4953]: I1011 03:25:34.797061 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"]
Oct 11 03:25:35 crc kubenswrapper[4953]: I1011 03:25:35.671067 4953 generic.go:334] "Generic (PLEG): container finished" podID="bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" containerID="c4d64a0fa00e13cc5cb9b8f442029fbb66faf60171aad61215c7ff4c7f5e432a" exitCode=0
Oct 11 03:25:35 crc kubenswrapper[4953]: I1011 03:25:35.671123 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" event={"ID":"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498","Type":"ContainerDied","Data":"c4d64a0fa00e13cc5cb9b8f442029fbb66faf60171aad61215c7ff4c7f5e432a"}
Oct 11 03:25:36 crc kubenswrapper[4953]: I1011 03:25:36.686799 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mlnmt" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="registry-server" containerID="cri-o://8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d" gracePeriod=2
Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.158903 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlnmt"
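[Editorial annotation] The "Killing container with a grace period" entry above (gracePeriod=2) is the usual two-step stop: the runtime delivers SIGTERM, waits up to the grace period for the process to exit, and only then sends SIGKILL. A compilable Go sketch of that pattern follows; stopWithGrace is an invented helper name, and in reality CRI-O performs this below the kubelet.

    // Package gracestop sketches SIGTERM-then-SIGKILL container termination.
    package gracestop

    import (
        "os"
        "syscall"
        "time"
    )

    // stopWithGrace signals the process, waits up to grace for it to exit,
    // then force-kills it, mirroring gracePeriod=2 in the log entry above.
    func stopWithGrace(p *os.Process, grace time.Duration) {
        _ = p.Signal(syscall.SIGTERM) // polite stop request
        done := make(chan struct{})
        go func() {
            _, _ = p.Wait() // reap the child when it exits
            close(done)
        }()
        select {
        case <-done: // exited within the grace period
        case <-time.After(grace):
            _ = p.Kill() // SIGKILL once the grace period is spent
        }
    }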
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.349936 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content\") pod \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.350128 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities\") pod \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.350314 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph\") pod \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.350445 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory\") pod \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.350747 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmfrc\" (UniqueName: \"kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc\") pod \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.351737 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities" (OuterVolumeSpecName: "utilities") pod "46d4ccab-11f2-45fd-9349-5ea3dea295c0" (UID: "46d4ccab-11f2-45fd-9349-5ea3dea295c0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.351873 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key\") pod \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\" (UID: \"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.351985 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6d7r\" (UniqueName: \"kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r\") pod \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\" (UID: \"46d4ccab-11f2-45fd-9349-5ea3dea295c0\") " Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.352505 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.357963 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r" (OuterVolumeSpecName: "kube-api-access-s6d7r") pod "46d4ccab-11f2-45fd-9349-5ea3dea295c0" (UID: "46d4ccab-11f2-45fd-9349-5ea3dea295c0"). InnerVolumeSpecName "kube-api-access-s6d7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.359760 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc" (OuterVolumeSpecName: "kube-api-access-rmfrc") pod "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" (UID: "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498"). InnerVolumeSpecName "kube-api-access-rmfrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.359804 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph" (OuterVolumeSpecName: "ceph") pod "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" (UID: "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.372536 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "46d4ccab-11f2-45fd-9349-5ea3dea295c0" (UID: "46d4ccab-11f2-45fd-9349-5ea3dea295c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.383069 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" (UID: "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.396258 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory" (OuterVolumeSpecName: "inventory") pod "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" (UID: "bf89bcbf-9d91-4ce7-8919-9b0c47b5d498"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453361 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453397 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmfrc\" (UniqueName: \"kubernetes.io/projected/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-kube-api-access-rmfrc\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453411 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453423 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6d7r\" (UniqueName: \"kubernetes.io/projected/46d4ccab-11f2-45fd-9349-5ea3dea295c0-kube-api-access-s6d7r\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453435 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46d4ccab-11f2-45fd-9349-5ea3dea295c0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.453446 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf89bcbf-9d91-4ce7-8919-9b0c47b5d498-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.702226 4953 generic.go:334] "Generic (PLEG): container finished" podID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerID="8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d" exitCode=0 Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.703283 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerDied","Data":"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d"} Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.703517 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mlnmt" event={"ID":"46d4ccab-11f2-45fd-9349-5ea3dea295c0","Type":"ContainerDied","Data":"7e72b5b89f9f7d55360244dde286002cd28c2bebb8a375bdfeaaaaca68ac7dff"} Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.703585 4953 scope.go:117] "RemoveContainer" containerID="8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.703744 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mlnmt" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.705235 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" event={"ID":"bf89bcbf-9d91-4ce7-8919-9b0c47b5d498","Type":"ContainerDied","Data":"7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56"} Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.705372 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7506af4185f355e4dfd4ce6ccbe10d956e060c1e9d5ab2c4a8924aa8b4503d56" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.705284 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.747722 4953 scope.go:117] "RemoveContainer" containerID="2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.774889 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"] Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.786082 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mlnmt"] Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.787382 4953 scope.go:117] "RemoveContainer" containerID="175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.794868 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95"] Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.795285 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="extract-utilities" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795306 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="extract-utilities" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.795332 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="registry-server" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795341 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="registry-server" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.795352 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795363 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.795382 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="extract-content" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795390 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="extract-content" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795616 4953 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" containerName="registry-server" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.795642 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf89bcbf-9d91-4ce7-8919-9b0c47b5d498" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.796396 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.797622 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.797907 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.812923 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.812969 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.813015 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.813117 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.813292 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.818703 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46d4ccab-11f2-45fd-9349-5ea3dea295c0" path="/var/lib/kubelet/pods/46d4ccab-11f2-45fd-9349-5ea3dea295c0/volumes" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.819444 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95"] Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.821554 4953 scope.go:117] "RemoveContainer" containerID="8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.821974 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d\": container with ID starting with 8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d not found: ID does not exist" containerID="8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.822008 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d"} err="failed to get container status \"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d\": rpc error: code = NotFound desc = could not find container 
\"8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d\": container with ID starting with 8f11059b7fa781d0cfdec5bc1d624044911827859ff4dbbbf665d559de7e321d not found: ID does not exist" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.822031 4953 scope.go:117] "RemoveContainer" containerID="2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.822255 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276\": container with ID starting with 2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276 not found: ID does not exist" containerID="2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.822347 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276"} err="failed to get container status \"2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276\": rpc error: code = NotFound desc = could not find container \"2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276\": container with ID starting with 2b9bc56ecc62142e9ada8d6d305aba0881ffbb5b9f25cb40b1fc32eb2581a276 not found: ID does not exist" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.822472 4953 scope.go:117] "RemoveContainer" containerID="175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81" Oct 11 03:25:37 crc kubenswrapper[4953]: E1011 03:25:37.825304 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81\": container with ID starting with 175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81 not found: ID does not exist" containerID="175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.825498 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81"} err="failed to get container status \"175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81\": rpc error: code = NotFound desc = could not find container \"175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81\": container with ID starting with 175f81c82f1f4539c648b85a6141009e25d9fcf246106f3bd3d047d993c67c81 not found: ID does not exist" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.860834 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.861069 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5rbk\" (UniqueName: \"kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " 
pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.861187 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.861438 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.963323 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.963369 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.963397 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5rbk\" (UniqueName: \"kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.963421 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.968176 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.968263 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.968512 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:37 crc kubenswrapper[4953]: I1011 03:25:37.981044 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5rbk\" (UniqueName: \"kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-d6b95\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:38 crc kubenswrapper[4953]: I1011 03:25:38.203181 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:25:38 crc kubenswrapper[4953]: I1011 03:25:38.752463 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95"] Oct 11 03:25:39 crc kubenswrapper[4953]: I1011 03:25:39.722273 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" event={"ID":"b64a4f35-42a0-4514-9da9-3319df4a9c6e","Type":"ContainerStarted","Data":"a49287b9cefbfef984abb264c3d2b3b4170ebef33735ea0321502eaf5de1d47d"} Oct 11 03:25:39 crc kubenswrapper[4953]: I1011 03:25:39.722954 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" event={"ID":"b64a4f35-42a0-4514-9da9-3319df4a9c6e","Type":"ContainerStarted","Data":"69d2122a21ea65e8d8fd9c77e9f53217cf914c6ad408211e59d2db2d6f91ab1f"} Oct 11 03:25:39 crc kubenswrapper[4953]: I1011 03:25:39.740030 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" podStartSLOduration=2.304101443 podStartE2EDuration="2.740012595s" podCreationTimestamp="2025-10-11 03:25:37 +0000 UTC" firstStartedPulling="2025-10-11 03:25:38.760724494 +0000 UTC m=+2349.693812138" lastFinishedPulling="2025-10-11 03:25:39.196635646 +0000 UTC m=+2350.129723290" observedRunningTime="2025-10-11 03:25:39.734803383 +0000 UTC m=+2350.667891027" watchObservedRunningTime="2025-10-11 03:25:39.740012595 +0000 UTC m=+2350.673100239" Oct 11 03:25:49 crc kubenswrapper[4953]: I1011 03:25:49.635743 4953 scope.go:117] "RemoveContainer" containerID="c0256f2a76964c6d88f26dde8103a51164ce6d301a7f8892a7a48bb2316d7625" Oct 11 03:25:49 crc kubenswrapper[4953]: I1011 03:25:49.673378 4953 scope.go:117] "RemoveContainer" containerID="5c94b3bc5f3685df28719c40756682616b36cf728d83ba21a15e34bd4c03e343" Oct 11 03:25:49 crc kubenswrapper[4953]: I1011 03:25:49.699984 4953 scope.go:117] "RemoveContainer" containerID="cdcba837a9d743950743e25e3a05ee67431f597cbd27d71f88057b1bdaaa0c3e" Oct 11 03:25:50 crc kubenswrapper[4953]: I1011 03:25:50.795519 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:25:50 crc kubenswrapper[4953]: E1011 03:25:50.796471 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:01 crc kubenswrapper[4953]: I1011 03:26:01.795397 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:26:01 crc kubenswrapper[4953]: E1011 03:26:01.796111 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:16 crc kubenswrapper[4953]: I1011 03:26:16.795952 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:26:16 crc kubenswrapper[4953]: E1011 03:26:16.797138 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:21 crc kubenswrapper[4953]: I1011 03:26:21.134887 4953 generic.go:334] "Generic (PLEG): container finished" podID="b64a4f35-42a0-4514-9da9-3319df4a9c6e" containerID="a49287b9cefbfef984abb264c3d2b3b4170ebef33735ea0321502eaf5de1d47d" exitCode=0 Oct 11 03:26:21 crc kubenswrapper[4953]: I1011 03:26:21.135412 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" event={"ID":"b64a4f35-42a0-4514-9da9-3319df4a9c6e","Type":"ContainerDied","Data":"a49287b9cefbfef984abb264c3d2b3b4170ebef33735ea0321502eaf5de1d47d"} Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.506453 4953 util.go:48] "No ready sandbox for pod can be found. 
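[Editorial annotation] machine-config-daemon-9jz9g has been failing long enough to sit at the kubelet's crash-loop back-off ceiling, so every sync attempt in this stretch (03:25:50, 03:26:01, 03:26:16, ...) is skipped with the same "back-off 5m0s" error until the timer expires. The back-off doubles on each failed restart up to a cap; the sketch below assumes the commonly cited 10s initial delay and 5m cap rather than quoting kubelet constants.

    package backoff

    import "time"

    // next returns the assumed crash-loop delay after another failed restart:
    // 10s, 20s, 40s, ... capped at 5m (the "back-off 5m0s" seen above).
    func next(current time.Duration) time.Duration {
        const (
            initial = 10 * time.Second
            ceiling = 5 * time.Minute
        )
        if current < initial {
            return initial
        }
        if current >= ceiling/2 {
            return ceiling
        }
        return current * 2
    }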
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.680165 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph\") pod \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.680281 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5rbk\" (UniqueName: \"kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk\") pod \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.680356 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key\") pod \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.680426 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory\") pod \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\" (UID: \"b64a4f35-42a0-4514-9da9-3319df4a9c6e\") " Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.685955 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk" (OuterVolumeSpecName: "kube-api-access-g5rbk") pod "b64a4f35-42a0-4514-9da9-3319df4a9c6e" (UID: "b64a4f35-42a0-4514-9da9-3319df4a9c6e"). InnerVolumeSpecName "kube-api-access-g5rbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.688228 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph" (OuterVolumeSpecName: "ceph") pod "b64a4f35-42a0-4514-9da9-3319df4a9c6e" (UID: "b64a4f35-42a0-4514-9da9-3319df4a9c6e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.704814 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory" (OuterVolumeSpecName: "inventory") pod "b64a4f35-42a0-4514-9da9-3319df4a9c6e" (UID: "b64a4f35-42a0-4514-9da9-3319df4a9c6e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.705278 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b64a4f35-42a0-4514-9da9-3319df4a9c6e" (UID: "b64a4f35-42a0-4514-9da9-3319df4a9c6e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.782750 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.782797 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.782810 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b64a4f35-42a0-4514-9da9-3319df4a9c6e-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:22 crc kubenswrapper[4953]: I1011 03:26:22.782822 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5rbk\" (UniqueName: \"kubernetes.io/projected/b64a4f35-42a0-4514-9da9-3319df4a9c6e-kube-api-access-g5rbk\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.153043 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" event={"ID":"b64a4f35-42a0-4514-9da9-3319df4a9c6e","Type":"ContainerDied","Data":"69d2122a21ea65e8d8fd9c77e9f53217cf914c6ad408211e59d2db2d6f91ab1f"} Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.153102 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69d2122a21ea65e8d8fd9c77e9f53217cf914c6ad408211e59d2db2d6f91ab1f" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.153200 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-d6b95" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.232641 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq"] Oct 11 03:26:23 crc kubenswrapper[4953]: E1011 03:26:23.233115 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64a4f35-42a0-4514-9da9-3319df4a9c6e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.233131 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64a4f35-42a0-4514-9da9-3319df4a9c6e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.233319 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64a4f35-42a0-4514-9da9-3319df4a9c6e" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.234031 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.243019 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq"] Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.299717 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.299921 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.300081 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.300200 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.300364 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.394058 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.394415 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6dc8\" (UniqueName: \"kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.394450 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.394496 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.495918 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.495991 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-f6dc8\" (UniqueName: \"kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.496044 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.496106 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.502778 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.503752 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.504167 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.513454 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6dc8\" (UniqueName: \"kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:23 crc kubenswrapper[4953]: I1011 03:26:23.611711 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:24 crc kubenswrapper[4953]: I1011 03:26:24.114162 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq"] Oct 11 03:26:24 crc kubenswrapper[4953]: I1011 03:26:24.162390 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" event={"ID":"313dfcf2-f0a4-452e-9f91-dcc5ba79b581","Type":"ContainerStarted","Data":"da730a60ca87e18adfc74fb9f70c49f0854455a1312fea431c15acc1355c331d"} Oct 11 03:26:25 crc kubenswrapper[4953]: I1011 03:26:25.172761 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" event={"ID":"313dfcf2-f0a4-452e-9f91-dcc5ba79b581","Type":"ContainerStarted","Data":"ade9050065e6b6706c00916f237bf203d5eb38b730458a3c9d45d1c6e5276815"} Oct 11 03:26:25 crc kubenswrapper[4953]: I1011 03:26:25.196393 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" podStartSLOduration=1.758222536 podStartE2EDuration="2.196376964s" podCreationTimestamp="2025-10-11 03:26:23 +0000 UTC" firstStartedPulling="2025-10-11 03:26:24.123870208 +0000 UTC m=+2395.056957852" lastFinishedPulling="2025-10-11 03:26:24.562024636 +0000 UTC m=+2395.495112280" observedRunningTime="2025-10-11 03:26:25.192854105 +0000 UTC m=+2396.125941779" watchObservedRunningTime="2025-10-11 03:26:25.196376964 +0000 UTC m=+2396.129464608" Oct 11 03:26:27 crc kubenswrapper[4953]: I1011 03:26:27.795856 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:26:27 crc kubenswrapper[4953]: E1011 03:26:27.796995 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:29 crc kubenswrapper[4953]: I1011 03:26:29.214867 4953 generic.go:334] "Generic (PLEG): container finished" podID="313dfcf2-f0a4-452e-9f91-dcc5ba79b581" containerID="ade9050065e6b6706c00916f237bf203d5eb38b730458a3c9d45d1c6e5276815" exitCode=0 Oct 11 03:26:29 crc kubenswrapper[4953]: I1011 03:26:29.214968 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" event={"ID":"313dfcf2-f0a4-452e-9f91-dcc5ba79b581","Type":"ContainerDied","Data":"ade9050065e6b6706c00916f237bf203d5eb38b730458a3c9d45d1c6e5276815"} Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.571222 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.760640 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6dc8\" (UniqueName: \"kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8\") pod \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.760728 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph\") pod \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.760843 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory\") pod \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.760968 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key\") pod \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\" (UID: \"313dfcf2-f0a4-452e-9f91-dcc5ba79b581\") " Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.779907 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph" (OuterVolumeSpecName: "ceph") pod "313dfcf2-f0a4-452e-9f91-dcc5ba79b581" (UID: "313dfcf2-f0a4-452e-9f91-dcc5ba79b581"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.779955 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8" (OuterVolumeSpecName: "kube-api-access-f6dc8") pod "313dfcf2-f0a4-452e-9f91-dcc5ba79b581" (UID: "313dfcf2-f0a4-452e-9f91-dcc5ba79b581"). InnerVolumeSpecName "kube-api-access-f6dc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.785813 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory" (OuterVolumeSpecName: "inventory") pod "313dfcf2-f0a4-452e-9f91-dcc5ba79b581" (UID: "313dfcf2-f0a4-452e-9f91-dcc5ba79b581"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.787134 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "313dfcf2-f0a4-452e-9f91-dcc5ba79b581" (UID: "313dfcf2-f0a4-452e-9f91-dcc5ba79b581"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.863735 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.863776 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.863790 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6dc8\" (UniqueName: \"kubernetes.io/projected/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-kube-api-access-f6dc8\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:30 crc kubenswrapper[4953]: I1011 03:26:30.863806 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/313dfcf2-f0a4-452e-9f91-dcc5ba79b581-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.234170 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" event={"ID":"313dfcf2-f0a4-452e-9f91-dcc5ba79b581","Type":"ContainerDied","Data":"da730a60ca87e18adfc74fb9f70c49f0854455a1312fea431c15acc1355c331d"} Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.234567 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da730a60ca87e18adfc74fb9f70c49f0854455a1312fea431c15acc1355c331d" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.234238 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.305535 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp"] Oct 11 03:26:31 crc kubenswrapper[4953]: E1011 03:26:31.305944 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="313dfcf2-f0a4-452e-9f91-dcc5ba79b581" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.305968 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="313dfcf2-f0a4-452e-9f91-dcc5ba79b581" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.306319 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="313dfcf2-f0a4-452e-9f91-dcc5ba79b581" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.307186 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.311187 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.311671 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.311907 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.312100 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.312412 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.323297 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp"] Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.474014 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.474120 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.474160 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.474201 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ptfk\" (UniqueName: \"kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.576173 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.576284 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.576335 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.576382 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ptfk\" (UniqueName: \"kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.582344 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.582425 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.588383 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.590691 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ptfk\" (UniqueName: \"kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-955dp\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:31 crc kubenswrapper[4953]: I1011 03:26:31.626623 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:26:32 crc kubenswrapper[4953]: I1011 03:26:32.127448 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp"] Oct 11 03:26:32 crc kubenswrapper[4953]: I1011 03:26:32.243575 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" event={"ID":"9e449589-3a86-4765-b844-ff6acbb3edf1","Type":"ContainerStarted","Data":"e55de32b37df07cfe21ed3ee5e441a1db8f69157da490ab6acef5a70dd044080"} Oct 11 03:26:33 crc kubenswrapper[4953]: I1011 03:26:33.256140 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" event={"ID":"9e449589-3a86-4765-b844-ff6acbb3edf1","Type":"ContainerStarted","Data":"9ba4080a309381aed675688146a0405de97c07dfea0fa8efeac58985c0d787f2"} Oct 11 03:26:33 crc kubenswrapper[4953]: I1011 03:26:33.299199 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" podStartSLOduration=1.774861908 podStartE2EDuration="2.299161095s" podCreationTimestamp="2025-10-11 03:26:31 +0000 UTC" firstStartedPulling="2025-10-11 03:26:32.140332315 +0000 UTC m=+2403.073419959" lastFinishedPulling="2025-10-11 03:26:32.664631462 +0000 UTC m=+2403.597719146" observedRunningTime="2025-10-11 03:26:33.277765515 +0000 UTC m=+2404.210853209" watchObservedRunningTime="2025-10-11 03:26:33.299161095 +0000 UTC m=+2404.232248789" Oct 11 03:26:38 crc kubenswrapper[4953]: I1011 03:26:38.795220 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:26:38 crc kubenswrapper[4953]: E1011 03:26:38.796139 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:50 crc kubenswrapper[4953]: I1011 03:26:50.795481 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:26:50 crc kubenswrapper[4953]: E1011 03:26:50.796816 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.116721 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.119617 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.122753 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.296632 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xmjc\" (UniqueName: \"kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.296697 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.296737 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.399029 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xmjc\" (UniqueName: \"kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.399122 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.399186 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.399964 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.400770 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.430314 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2xmjc\" (UniqueName: \"kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc\") pod \"redhat-operators-z89bz\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.446222 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:26:55 crc kubenswrapper[4953]: I1011 03:26:55.972879 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:26:56 crc kubenswrapper[4953]: I1011 03:26:56.505561 4953 generic.go:334] "Generic (PLEG): container finished" podID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerID="75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523" exitCode=0 Oct 11 03:26:56 crc kubenswrapper[4953]: I1011 03:26:56.505631 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerDied","Data":"75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523"} Oct 11 03:26:56 crc kubenswrapper[4953]: I1011 03:26:56.505682 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerStarted","Data":"037479ca38d89cb44d3d5fe494628cf2e972dbf57101a1e1d1dfe87562a97f98"} Oct 11 03:26:58 crc kubenswrapper[4953]: I1011 03:26:58.526127 4953 generic.go:334] "Generic (PLEG): container finished" podID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerID="6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2" exitCode=0 Oct 11 03:26:58 crc kubenswrapper[4953]: I1011 03:26:58.526348 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerDied","Data":"6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2"} Oct 11 03:26:59 crc kubenswrapper[4953]: I1011 03:26:59.546173 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerStarted","Data":"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6"} Oct 11 03:26:59 crc kubenswrapper[4953]: I1011 03:26:59.565093 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z89bz" podStartSLOduration=2.048352511 podStartE2EDuration="4.565075594s" podCreationTimestamp="2025-10-11 03:26:55 +0000 UTC" firstStartedPulling="2025-10-11 03:26:56.509069726 +0000 UTC m=+2427.442157400" lastFinishedPulling="2025-10-11 03:26:59.025792839 +0000 UTC m=+2429.958880483" observedRunningTime="2025-10-11 03:26:59.563110755 +0000 UTC m=+2430.496198409" watchObservedRunningTime="2025-10-11 03:26:59.565075594 +0000 UTC m=+2430.498163228" Oct 11 03:27:01 crc kubenswrapper[4953]: I1011 03:27:01.796025 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:27:01 crc kubenswrapper[4953]: E1011 03:27:01.796598 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:27:05 crc kubenswrapper[4953]: I1011 03:27:05.447497 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:05 crc kubenswrapper[4953]: I1011 03:27:05.448237 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:05 crc kubenswrapper[4953]: I1011 03:27:05.506947 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:05 crc kubenswrapper[4953]: I1011 03:27:05.646879 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:05 crc kubenswrapper[4953]: I1011 03:27:05.882779 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:27:07 crc kubenswrapper[4953]: I1011 03:27:07.617819 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z89bz" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="registry-server" containerID="cri-o://80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6" gracePeriod=2 Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.083375 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.241885 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xmjc\" (UniqueName: \"kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc\") pod \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.242103 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content\") pod \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.242179 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities\") pod \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\" (UID: \"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b\") " Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.243206 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities" (OuterVolumeSpecName: "utilities") pod "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" (UID: "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.247618 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc" (OuterVolumeSpecName: "kube-api-access-2xmjc") pod "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" (UID: "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b"). InnerVolumeSpecName "kube-api-access-2xmjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.344047 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xmjc\" (UniqueName: \"kubernetes.io/projected/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-kube-api-access-2xmjc\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.344084 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.630062 4953 generic.go:334] "Generic (PLEG): container finished" podID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerID="80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6" exitCode=0 Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.630110 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerDied","Data":"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6"} Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.630118 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z89bz" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.630182 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z89bz" event={"ID":"22ef0c42-0e95-45c8-908a-6ae6a18f8b1b","Type":"ContainerDied","Data":"037479ca38d89cb44d3d5fe494628cf2e972dbf57101a1e1d1dfe87562a97f98"} Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.630205 4953 scope.go:117] "RemoveContainer" containerID="80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.662107 4953 scope.go:117] "RemoveContainer" containerID="6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.682251 4953 scope.go:117] "RemoveContainer" containerID="75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.730063 4953 scope.go:117] "RemoveContainer" containerID="80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6" Oct 11 03:27:08 crc kubenswrapper[4953]: E1011 03:27:08.730560 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6\": container with ID starting with 80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6 not found: ID does not exist" containerID="80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.730616 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6"} err="failed to get container status \"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6\": rpc error: code = NotFound desc = could not find container \"80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6\": container with ID starting with 80fd4e7e4b2a239591a271d8a08d6ba8e5213d2bcf90fefd837f9b7e164ce7e6 not found: ID does not exist" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.730641 4953 scope.go:117] "RemoveContainer" containerID="6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2" Oct 11 03:27:08 crc kubenswrapper[4953]: E1011 03:27:08.730965 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2\": container with ID starting with 6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2 not found: ID does not exist" containerID="6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.731000 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2"} err="failed to get container status \"6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2\": rpc error: code = NotFound desc = could not find container \"6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2\": container with ID starting with 6f1574563241787b11dd5781e557608a5322927dd3d4b97081bd60502bb102f2 not found: ID does not exist" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.731015 4953 scope.go:117] "RemoveContainer" containerID="75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523" Oct 11 03:27:08 crc kubenswrapper[4953]: E1011 03:27:08.731316 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523\": container with ID starting with 75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523 not found: ID does not exist" containerID="75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523" Oct 11 03:27:08 crc kubenswrapper[4953]: I1011 03:27:08.731341 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523"} err="failed to get container status \"75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523\": rpc error: code = NotFound desc = could not find container \"75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523\": container with ID starting with 75458d6b73416b70f001f771b2ce58b3cfc8b22f2b216a2624f902f38186e523 not found: ID does not exist" Oct 11 03:27:09 crc kubenswrapper[4953]: I1011 03:27:09.886877 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" (UID: "22ef0c42-0e95-45c8-908a-6ae6a18f8b1b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:27:09 crc kubenswrapper[4953]: I1011 03:27:09.986060 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:10 crc kubenswrapper[4953]: I1011 03:27:10.183766 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:27:10 crc kubenswrapper[4953]: I1011 03:27:10.198360 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z89bz"] Oct 11 03:27:11 crc kubenswrapper[4953]: I1011 03:27:11.813858 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" path="/var/lib/kubelet/pods/22ef0c42-0e95-45c8-908a-6ae6a18f8b1b/volumes" Oct 11 03:27:14 crc kubenswrapper[4953]: I1011 03:27:14.796041 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:27:14 crc kubenswrapper[4953]: E1011 03:27:14.796820 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:27:21 crc kubenswrapper[4953]: I1011 03:27:21.774234 4953 generic.go:334] "Generic (PLEG): container finished" podID="9e449589-3a86-4765-b844-ff6acbb3edf1" containerID="9ba4080a309381aed675688146a0405de97c07dfea0fa8efeac58985c0d787f2" exitCode=0 Oct 11 03:27:21 crc kubenswrapper[4953]: I1011 03:27:21.774372 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" event={"ID":"9e449589-3a86-4765-b844-ff6acbb3edf1","Type":"ContainerDied","Data":"9ba4080a309381aed675688146a0405de97c07dfea0fa8efeac58985c0d787f2"} Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.292913 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.493361 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph\") pod \"9e449589-3a86-4765-b844-ff6acbb3edf1\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.493415 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key\") pod \"9e449589-3a86-4765-b844-ff6acbb3edf1\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.493576 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ptfk\" (UniqueName: \"kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk\") pod \"9e449589-3a86-4765-b844-ff6acbb3edf1\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.493594 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory\") pod \"9e449589-3a86-4765-b844-ff6acbb3edf1\" (UID: \"9e449589-3a86-4765-b844-ff6acbb3edf1\") " Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.502901 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk" (OuterVolumeSpecName: "kube-api-access-4ptfk") pod "9e449589-3a86-4765-b844-ff6acbb3edf1" (UID: "9e449589-3a86-4765-b844-ff6acbb3edf1"). InnerVolumeSpecName "kube-api-access-4ptfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.514754 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph" (OuterVolumeSpecName: "ceph") pod "9e449589-3a86-4765-b844-ff6acbb3edf1" (UID: "9e449589-3a86-4765-b844-ff6acbb3edf1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.541857 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory" (OuterVolumeSpecName: "inventory") pod "9e449589-3a86-4765-b844-ff6acbb3edf1" (UID: "9e449589-3a86-4765-b844-ff6acbb3edf1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.544721 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9e449589-3a86-4765-b844-ff6acbb3edf1" (UID: "9e449589-3a86-4765-b844-ff6acbb3edf1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.595773 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.595809 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.595819 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ptfk\" (UniqueName: \"kubernetes.io/projected/9e449589-3a86-4765-b844-ff6acbb3edf1-kube-api-access-4ptfk\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.595828 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e449589-3a86-4765-b844-ff6acbb3edf1-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.811490 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" event={"ID":"9e449589-3a86-4765-b844-ff6acbb3edf1","Type":"ContainerDied","Data":"e55de32b37df07cfe21ed3ee5e441a1db8f69157da490ab6acef5a70dd044080"} Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.811535 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e55de32b37df07cfe21ed3ee5e441a1db8f69157da490ab6acef5a70dd044080" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.812726 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-955dp" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.878295 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5jd92"] Oct 11 03:27:23 crc kubenswrapper[4953]: E1011 03:27:23.878862 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e449589-3a86-4765-b844-ff6acbb3edf1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.878889 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e449589-3a86-4765-b844-ff6acbb3edf1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:23 crc kubenswrapper[4953]: E1011 03:27:23.878901 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="registry-server" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.878911 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="registry-server" Oct 11 03:27:23 crc kubenswrapper[4953]: E1011 03:27:23.878938 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="extract-content" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.878946 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="extract-content" Oct 11 03:27:23 crc kubenswrapper[4953]: E1011 03:27:23.878963 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="extract-utilities" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.878975 
4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="extract-utilities" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.879178 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="22ef0c42-0e95-45c8-908a-6ae6a18f8b1b" containerName="registry-server" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.879203 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e449589-3a86-4765-b844-ff6acbb3edf1" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.880044 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.882082 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.882402 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.882483 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.882974 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.883146 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.889065 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5jd92"] Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.902410 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.902532 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.902582 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:23 crc kubenswrapper[4953]: I1011 03:27:23.902631 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7x4m\" (UniqueName: \"kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc 
kubenswrapper[4953]: I1011 03:27:24.004380 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.004469 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.004528 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.004570 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7x4m\" (UniqueName: \"kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.007909 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.008220 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.008836 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.019695 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7x4m\" (UniqueName: \"kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m\") pod \"ssh-known-hosts-edpm-deployment-5jd92\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.200108 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.764450 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5jd92"] Oct 11 03:27:24 crc kubenswrapper[4953]: I1011 03:27:24.821135 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" event={"ID":"1f22d2a9-5546-4397-816f-000c90554995","Type":"ContainerStarted","Data":"483e41ceec7b5514b17c5b3172b8c8a559a541b63ca24e82b2dc7e225f416e06"} Oct 11 03:27:25 crc kubenswrapper[4953]: I1011 03:27:25.837841 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" event={"ID":"1f22d2a9-5546-4397-816f-000c90554995","Type":"ContainerStarted","Data":"358f628ddb3c2afed18d65f0286f5265c90140c58cae00dfba1e116e31f08dea"} Oct 11 03:27:25 crc kubenswrapper[4953]: I1011 03:27:25.854884 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" podStartSLOduration=2.363266928 podStartE2EDuration="2.854868968s" podCreationTimestamp="2025-10-11 03:27:23 +0000 UTC" firstStartedPulling="2025-10-11 03:27:24.778254208 +0000 UTC m=+2455.711341852" lastFinishedPulling="2025-10-11 03:27:25.269856248 +0000 UTC m=+2456.202943892" observedRunningTime="2025-10-11 03:27:25.853923974 +0000 UTC m=+2456.787011628" watchObservedRunningTime="2025-10-11 03:27:25.854868968 +0000 UTC m=+2456.787956612" Oct 11 03:27:26 crc kubenswrapper[4953]: I1011 03:27:26.796060 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:27:26 crc kubenswrapper[4953]: E1011 03:27:26.797000 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:27:35 crc kubenswrapper[4953]: I1011 03:27:35.917774 4953 generic.go:334] "Generic (PLEG): container finished" podID="1f22d2a9-5546-4397-816f-000c90554995" containerID="358f628ddb3c2afed18d65f0286f5265c90140c58cae00dfba1e116e31f08dea" exitCode=0 Oct 11 03:27:35 crc kubenswrapper[4953]: I1011 03:27:35.917916 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" event={"ID":"1f22d2a9-5546-4397-816f-000c90554995","Type":"ContainerDied","Data":"358f628ddb3c2afed18d65f0286f5265c90140c58cae00dfba1e116e31f08dea"} Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.347617 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.447531 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph\") pod \"1f22d2a9-5546-4397-816f-000c90554995\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.447575 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam\") pod \"1f22d2a9-5546-4397-816f-000c90554995\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.447716 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0\") pod \"1f22d2a9-5546-4397-816f-000c90554995\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.447813 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7x4m\" (UniqueName: \"kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m\") pod \"1f22d2a9-5546-4397-816f-000c90554995\" (UID: \"1f22d2a9-5546-4397-816f-000c90554995\") " Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.453054 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph" (OuterVolumeSpecName: "ceph") pod "1f22d2a9-5546-4397-816f-000c90554995" (UID: "1f22d2a9-5546-4397-816f-000c90554995"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.458835 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m" (OuterVolumeSpecName: "kube-api-access-x7x4m") pod "1f22d2a9-5546-4397-816f-000c90554995" (UID: "1f22d2a9-5546-4397-816f-000c90554995"). InnerVolumeSpecName "kube-api-access-x7x4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.477782 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1f22d2a9-5546-4397-816f-000c90554995" (UID: "1f22d2a9-5546-4397-816f-000c90554995"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.480938 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1f22d2a9-5546-4397-816f-000c90554995" (UID: "1f22d2a9-5546-4397-816f-000c90554995"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.550313 4953 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.550356 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7x4m\" (UniqueName: \"kubernetes.io/projected/1f22d2a9-5546-4397-816f-000c90554995-kube-api-access-x7x4m\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.550370 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.550382 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f22d2a9-5546-4397-816f-000c90554995-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.796677 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:27:37 crc kubenswrapper[4953]: E1011 03:27:37.796942 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.934205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" event={"ID":"1f22d2a9-5546-4397-816f-000c90554995","Type":"ContainerDied","Data":"483e41ceec7b5514b17c5b3172b8c8a559a541b63ca24e82b2dc7e225f416e06"} Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.934251 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483e41ceec7b5514b17c5b3172b8c8a559a541b63ca24e82b2dc7e225f416e06" Oct 11 03:27:37 crc kubenswrapper[4953]: I1011 03:27:37.934273 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5jd92" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.004774 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs"] Oct 11 03:27:38 crc kubenswrapper[4953]: E1011 03:27:38.005550 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f22d2a9-5546-4397-816f-000c90554995" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.005575 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f22d2a9-5546-4397-816f-000c90554995" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.005873 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f22d2a9-5546-4397-816f-000c90554995" containerName="ssh-known-hosts-edpm-deployment" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.006625 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.008412 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.008668 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.009100 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.009383 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.009558 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.015030 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs"] Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.172673 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.172718 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.172743 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9pbz\" (UniqueName: \"kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.172822 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.274518 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.274568 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key\") 
pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.274595 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9pbz\" (UniqueName: \"kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.274667 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.280420 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.281839 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.283144 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.292524 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9pbz\" (UniqueName: \"kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-bmjbs\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.371883 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.892717 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs"] Oct 11 03:27:38 crc kubenswrapper[4953]: I1011 03:27:38.945102 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" event={"ID":"5b1bf278-87d7-4410-9cdb-54583890f234","Type":"ContainerStarted","Data":"04f455255f7ee9ed65974d2701a1df6ab6c51c05633b1cb35f5a8854f92a7a0b"} Oct 11 03:27:39 crc kubenswrapper[4953]: I1011 03:27:39.954059 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" event={"ID":"5b1bf278-87d7-4410-9cdb-54583890f234","Type":"ContainerStarted","Data":"b5636229ea3004a9e072bc527c9adc26d14d7912bc066abd2eea33c0dcd0c414"} Oct 11 03:27:39 crc kubenswrapper[4953]: I1011 03:27:39.976250 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" podStartSLOduration=2.215894526 podStartE2EDuration="2.97622846s" podCreationTimestamp="2025-10-11 03:27:37 +0000 UTC" firstStartedPulling="2025-10-11 03:27:38.899920008 +0000 UTC m=+2469.833007652" lastFinishedPulling="2025-10-11 03:27:39.660253942 +0000 UTC m=+2470.593341586" observedRunningTime="2025-10-11 03:27:39.971118721 +0000 UTC m=+2470.904206365" watchObservedRunningTime="2025-10-11 03:27:39.97622846 +0000 UTC m=+2470.909316104" Oct 11 03:27:49 crc kubenswrapper[4953]: I1011 03:27:49.037250 4953 generic.go:334] "Generic (PLEG): container finished" podID="5b1bf278-87d7-4410-9cdb-54583890f234" containerID="b5636229ea3004a9e072bc527c9adc26d14d7912bc066abd2eea33c0dcd0c414" exitCode=0 Oct 11 03:27:49 crc kubenswrapper[4953]: I1011 03:27:49.037337 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" event={"ID":"5b1bf278-87d7-4410-9cdb-54583890f234","Type":"ContainerDied","Data":"b5636229ea3004a9e072bc527c9adc26d14d7912bc066abd2eea33c0dcd0c414"} Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.508745 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.598706 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory\") pod \"5b1bf278-87d7-4410-9cdb-54583890f234\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.598826 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9pbz\" (UniqueName: \"kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz\") pod \"5b1bf278-87d7-4410-9cdb-54583890f234\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.598851 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key\") pod \"5b1bf278-87d7-4410-9cdb-54583890f234\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.598989 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph\") pod \"5b1bf278-87d7-4410-9cdb-54583890f234\" (UID: \"5b1bf278-87d7-4410-9cdb-54583890f234\") " Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.603738 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph" (OuterVolumeSpecName: "ceph") pod "5b1bf278-87d7-4410-9cdb-54583890f234" (UID: "5b1bf278-87d7-4410-9cdb-54583890f234"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.605467 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz" (OuterVolumeSpecName: "kube-api-access-n9pbz") pod "5b1bf278-87d7-4410-9cdb-54583890f234" (UID: "5b1bf278-87d7-4410-9cdb-54583890f234"). InnerVolumeSpecName "kube-api-access-n9pbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.624626 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5b1bf278-87d7-4410-9cdb-54583890f234" (UID: "5b1bf278-87d7-4410-9cdb-54583890f234"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.625021 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory" (OuterVolumeSpecName: "inventory") pod "5b1bf278-87d7-4410-9cdb-54583890f234" (UID: "5b1bf278-87d7-4410-9cdb-54583890f234"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.701949 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.701993 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9pbz\" (UniqueName: \"kubernetes.io/projected/5b1bf278-87d7-4410-9cdb-54583890f234-kube-api-access-n9pbz\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.702008 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:50 crc kubenswrapper[4953]: I1011 03:27:50.702018 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b1bf278-87d7-4410-9cdb-54583890f234-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.059577 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" event={"ID":"5b1bf278-87d7-4410-9cdb-54583890f234","Type":"ContainerDied","Data":"04f455255f7ee9ed65974d2701a1df6ab6c51c05633b1cb35f5a8854f92a7a0b"} Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.059630 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04f455255f7ee9ed65974d2701a1df6ab6c51c05633b1cb35f5a8854f92a7a0b" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.059745 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-bmjbs" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.144105 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2"] Oct 11 03:27:51 crc kubenswrapper[4953]: E1011 03:27:51.144458 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b1bf278-87d7-4410-9cdb-54583890f234" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.144474 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b1bf278-87d7-4410-9cdb-54583890f234" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.144646 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b1bf278-87d7-4410-9cdb-54583890f234" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.145193 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.147900 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.148880 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.149312 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.149780 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.169123 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.172580 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2"] Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.211408 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.211535 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzwjg\" (UniqueName: \"kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.211669 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.211761 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.313788 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.313895 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.313949 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzwjg\" (UniqueName: \"kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.314000 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.317145 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.317570 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.320665 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.330773 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzwjg\" (UniqueName: \"kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.472506 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.795905 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:27:51 crc kubenswrapper[4953]: E1011 03:27:51.796466 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:27:51 crc kubenswrapper[4953]: I1011 03:27:51.955472 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2"] Oct 11 03:27:52 crc kubenswrapper[4953]: I1011 03:27:52.071123 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" event={"ID":"e20566a0-5f44-415c-9364-6b64a89f82e8","Type":"ContainerStarted","Data":"6574325d8f3e23eba48cd95fc58ad5971d487a751388a65bf78d2f90f4e789a7"} Oct 11 03:27:53 crc kubenswrapper[4953]: I1011 03:27:53.082592 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" event={"ID":"e20566a0-5f44-415c-9364-6b64a89f82e8","Type":"ContainerStarted","Data":"db0093f50b80b45b7bc40e0ed607e54262a8bb0bcd042721b1e464a7f2109b50"} Oct 11 03:27:53 crc kubenswrapper[4953]: I1011 03:27:53.113753 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" podStartSLOduration=1.6836633619999999 podStartE2EDuration="2.113727219s" podCreationTimestamp="2025-10-11 03:27:51 +0000 UTC" firstStartedPulling="2025-10-11 03:27:51.965591231 +0000 UTC m=+2482.898678885" lastFinishedPulling="2025-10-11 03:27:52.395655098 +0000 UTC m=+2483.328742742" observedRunningTime="2025-10-11 03:27:53.099506092 +0000 UTC m=+2484.032593746" watchObservedRunningTime="2025-10-11 03:27:53.113727219 +0000 UTC m=+2484.046814883" Oct 11 03:28:02 crc kubenswrapper[4953]: I1011 03:28:02.163250 4953 generic.go:334] "Generic (PLEG): container finished" podID="e20566a0-5f44-415c-9364-6b64a89f82e8" containerID="db0093f50b80b45b7bc40e0ed607e54262a8bb0bcd042721b1e464a7f2109b50" exitCode=0 Oct 11 03:28:02 crc kubenswrapper[4953]: I1011 03:28:02.163366 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" event={"ID":"e20566a0-5f44-415c-9364-6b64a89f82e8","Type":"ContainerDied","Data":"db0093f50b80b45b7bc40e0ed607e54262a8bb0bcd042721b1e464a7f2109b50"} Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.670189 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.744338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzwjg\" (UniqueName: \"kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg\") pod \"e20566a0-5f44-415c-9364-6b64a89f82e8\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.744429 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph\") pod \"e20566a0-5f44-415c-9364-6b64a89f82e8\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.744517 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory\") pod \"e20566a0-5f44-415c-9364-6b64a89f82e8\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.744736 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key\") pod \"e20566a0-5f44-415c-9364-6b64a89f82e8\" (UID: \"e20566a0-5f44-415c-9364-6b64a89f82e8\") " Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.751632 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph" (OuterVolumeSpecName: "ceph") pod "e20566a0-5f44-415c-9364-6b64a89f82e8" (UID: "e20566a0-5f44-415c-9364-6b64a89f82e8"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.752347 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg" (OuterVolumeSpecName: "kube-api-access-zzwjg") pod "e20566a0-5f44-415c-9364-6b64a89f82e8" (UID: "e20566a0-5f44-415c-9364-6b64a89f82e8"). InnerVolumeSpecName "kube-api-access-zzwjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.770950 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e20566a0-5f44-415c-9364-6b64a89f82e8" (UID: "e20566a0-5f44-415c-9364-6b64a89f82e8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.780559 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory" (OuterVolumeSpecName: "inventory") pod "e20566a0-5f44-415c-9364-6b64a89f82e8" (UID: "e20566a0-5f44-415c-9364-6b64a89f82e8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.796872 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:28:03 crc kubenswrapper[4953]: E1011 03:28:03.797390 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.847566 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.847618 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzwjg\" (UniqueName: \"kubernetes.io/projected/e20566a0-5f44-415c-9364-6b64a89f82e8-kube-api-access-zzwjg\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.847634 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:03 crc kubenswrapper[4953]: I1011 03:28:03.847645 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e20566a0-5f44-415c-9364-6b64a89f82e8-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.185406 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" event={"ID":"e20566a0-5f44-415c-9364-6b64a89f82e8","Type":"ContainerDied","Data":"6574325d8f3e23eba48cd95fc58ad5971d487a751388a65bf78d2f90f4e789a7"} Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.185454 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6574325d8f3e23eba48cd95fc58ad5971d487a751388a65bf78d2f90f4e789a7" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.185503 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.266800 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464"] Oct 11 03:28:04 crc kubenswrapper[4953]: E1011 03:28:04.267126 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e20566a0-5f44-415c-9364-6b64a89f82e8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.267146 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e20566a0-5f44-415c-9364-6b64a89f82e8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.267326 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e20566a0-5f44-415c-9364-6b64a89f82e8" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.268981 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.271561 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.271845 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.271897 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.272068 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.272594 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.272660 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.272680 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.273777 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.321227 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464"] Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.356533 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.356573 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.356599 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.356777 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.356827 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357083 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357234 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357264 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357320 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxcjn\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357378 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357404 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357459 4953 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.357493 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.459761 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460357 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460428 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460502 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460687 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460748 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxcjn\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460909 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.460967 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.461068 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.461126 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.462838 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.462961 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 
03:28:04.464774 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.465675 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.465824 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.466831 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.467674 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.468301 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.468398 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.472412 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc 
kubenswrapper[4953]: I1011 03:28:04.473172 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.473704 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.474356 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.475339 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.481993 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxcjn\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-45464\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:04 crc kubenswrapper[4953]: I1011 03:28:04.584751 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:05 crc kubenswrapper[4953]: I1011 03:28:05.215002 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464"] Oct 11 03:28:06 crc kubenswrapper[4953]: I1011 03:28:06.203535 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" event={"ID":"b52f2621-2d4c-4196-9ede-38eede157dd9","Type":"ContainerStarted","Data":"eecbb4ea15824920003cc9d4afc754de189412926b422512b8fff601e22cb780"} Oct 11 03:28:06 crc kubenswrapper[4953]: I1011 03:28:06.204135 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" event={"ID":"b52f2621-2d4c-4196-9ede-38eede157dd9","Type":"ContainerStarted","Data":"4b8c8911a9d73efa85a8507e9544f3062a7c68578e1d44b8e4035e7f063e170c"} Oct 11 03:28:16 crc kubenswrapper[4953]: I1011 03:28:16.795482 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:28:16 crc kubenswrapper[4953]: E1011 03:28:16.796284 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:28:30 crc kubenswrapper[4953]: I1011 03:28:30.795440 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:28:30 crc kubenswrapper[4953]: E1011 03:28:30.796316 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:28:41 crc kubenswrapper[4953]: I1011 03:28:41.541494 4953 generic.go:334] "Generic (PLEG): container finished" podID="b52f2621-2d4c-4196-9ede-38eede157dd9" containerID="eecbb4ea15824920003cc9d4afc754de189412926b422512b8fff601e22cb780" exitCode=0 Oct 11 03:28:41 crc kubenswrapper[4953]: I1011 03:28:41.541569 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" event={"ID":"b52f2621-2d4c-4196-9ede-38eede157dd9","Type":"ContainerDied","Data":"eecbb4ea15824920003cc9d4afc754de189412926b422512b8fff601e22cb780"} Oct 11 03:28:42 crc kubenswrapper[4953]: I1011 03:28:42.795528 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.052886 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225062 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225181 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225222 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225294 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225323 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225367 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225406 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225461 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225584 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxcjn\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 
03:28:43.225693 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225821 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225863 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.225938 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory\") pod \"b52f2621-2d4c-4196-9ede-38eede157dd9\" (UID: \"b52f2621-2d4c-4196-9ede-38eede157dd9\") " Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.231431 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.232032 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph" (OuterVolumeSpecName: "ceph") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.232163 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.232760 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.232798 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.233598 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.234275 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.234382 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn" (OuterVolumeSpecName: "kube-api-access-kxcjn") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "kube-api-access-kxcjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.234626 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.234947 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.238959 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.264672 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory" (OuterVolumeSpecName: "inventory") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.287353 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b52f2621-2d4c-4196-9ede-38eede157dd9" (UID: "b52f2621-2d4c-4196-9ede-38eede157dd9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328344 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxcjn\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-kube-api-access-kxcjn\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328384 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328398 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328412 4953 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328425 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328436 4953 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328450 4953 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328462 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328474 4953 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328486 4953 
reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328497 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328508 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b52f2621-2d4c-4196-9ede-38eede157dd9-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.328520 4953 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b52f2621-2d4c-4196-9ede-38eede157dd9-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.559996 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" event={"ID":"b52f2621-2d4c-4196-9ede-38eede157dd9","Type":"ContainerDied","Data":"4b8c8911a9d73efa85a8507e9544f3062a7c68578e1d44b8e4035e7f063e170c"} Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.560054 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b8c8911a9d73efa85a8507e9544f3062a7c68578e1d44b8e4035e7f063e170c" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.560023 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-45464" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.562404 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c"} Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.690428 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq"] Oct 11 03:28:43 crc kubenswrapper[4953]: E1011 03:28:43.691083 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b52f2621-2d4c-4196-9ede-38eede157dd9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.691184 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b52f2621-2d4c-4196-9ede-38eede157dd9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.691524 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b52f2621-2d4c-4196-9ede-38eede157dd9" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.692427 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.694358 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.695588 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.695890 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.695897 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.696932 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.699112 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq"] Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.748061 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.748274 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6kc4\" (UniqueName: \"kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.748412 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.748559 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.851928 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.852627 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.852733 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.852811 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6kc4\" (UniqueName: \"kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.855953 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.856241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.859856 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:43 crc kubenswrapper[4953]: I1011 03:28:43.870150 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6kc4\" (UniqueName: \"kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:44 crc kubenswrapper[4953]: I1011 03:28:44.014930 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:44 crc kubenswrapper[4953]: I1011 03:28:44.538398 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq"] Oct 11 03:28:44 crc kubenswrapper[4953]: W1011 03:28:44.545441 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dfa1f56_823a_4c82_a73c_5eb550c9f00b.slice/crio-52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c WatchSource:0}: Error finding container 52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c: Status 404 returned error can't find the container with id 52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c Oct 11 03:28:44 crc kubenswrapper[4953]: I1011 03:28:44.572591 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" event={"ID":"1dfa1f56-823a-4c82-a73c-5eb550c9f00b","Type":"ContainerStarted","Data":"52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c"} Oct 11 03:28:45 crc kubenswrapper[4953]: I1011 03:28:45.585970 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" event={"ID":"1dfa1f56-823a-4c82-a73c-5eb550c9f00b","Type":"ContainerStarted","Data":"527b1e9fa69464c132279c338f1e44445e5b76be0fecb05627af6eea28f6f83b"} Oct 11 03:28:45 crc kubenswrapper[4953]: I1011 03:28:45.609717 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" podStartSLOduration=2.062734583 podStartE2EDuration="2.60969034s" podCreationTimestamp="2025-10-11 03:28:43 +0000 UTC" firstStartedPulling="2025-10-11 03:28:44.548462798 +0000 UTC m=+2535.481550482" lastFinishedPulling="2025-10-11 03:28:45.095418595 +0000 UTC m=+2536.028506239" observedRunningTime="2025-10-11 03:28:45.602551831 +0000 UTC m=+2536.535639485" watchObservedRunningTime="2025-10-11 03:28:45.60969034 +0000 UTC m=+2536.542778024" Oct 11 03:28:51 crc kubenswrapper[4953]: I1011 03:28:51.654626 4953 generic.go:334] "Generic (PLEG): container finished" podID="1dfa1f56-823a-4c82-a73c-5eb550c9f00b" containerID="527b1e9fa69464c132279c338f1e44445e5b76be0fecb05627af6eea28f6f83b" exitCode=0 Oct 11 03:28:51 crc kubenswrapper[4953]: I1011 03:28:51.654757 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" event={"ID":"1dfa1f56-823a-4c82-a73c-5eb550c9f00b","Type":"ContainerDied","Data":"527b1e9fa69464c132279c338f1e44445e5b76be0fecb05627af6eea28f6f83b"} Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.084799 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.236460 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph\") pod \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.236576 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key\") pod \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.236633 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory\") pod \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.236677 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6kc4\" (UniqueName: \"kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4\") pod \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\" (UID: \"1dfa1f56-823a-4c82-a73c-5eb550c9f00b\") " Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.242734 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph" (OuterVolumeSpecName: "ceph") pod "1dfa1f56-823a-4c82-a73c-5eb550c9f00b" (UID: "1dfa1f56-823a-4c82-a73c-5eb550c9f00b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.242817 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4" (OuterVolumeSpecName: "kube-api-access-m6kc4") pod "1dfa1f56-823a-4c82-a73c-5eb550c9f00b" (UID: "1dfa1f56-823a-4c82-a73c-5eb550c9f00b"). InnerVolumeSpecName "kube-api-access-m6kc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.275902 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1dfa1f56-823a-4c82-a73c-5eb550c9f00b" (UID: "1dfa1f56-823a-4c82-a73c-5eb550c9f00b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.282278 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory" (OuterVolumeSpecName: "inventory") pod "1dfa1f56-823a-4c82-a73c-5eb550c9f00b" (UID: "1dfa1f56-823a-4c82-a73c-5eb550c9f00b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.338297 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.338344 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.338359 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.338374 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6kc4\" (UniqueName: \"kubernetes.io/projected/1dfa1f56-823a-4c82-a73c-5eb550c9f00b-kube-api-access-m6kc4\") on node \"crc\" DevicePath \"\"" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.679250 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" event={"ID":"1dfa1f56-823a-4c82-a73c-5eb550c9f00b","Type":"ContainerDied","Data":"52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c"} Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.679989 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52adb5a013d33cd87815d40c241f0ed24261f65d41beb379eae808350e06cb2c" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.679872 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.784556 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55"] Oct 11 03:28:53 crc kubenswrapper[4953]: E1011 03:28:53.784952 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfa1f56-823a-4c82-a73c-5eb550c9f00b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.784969 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfa1f56-823a-4c82-a73c-5eb550c9f00b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.785128 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dfa1f56-823a-4c82-a73c-5eb550c9f00b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.785709 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.792773 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.793314 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.794113 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.794382 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.795405 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.796895 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.813556 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55"] Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.847866 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.847910 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.847930 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2t5x\" (UniqueName: \"kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.848105 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.848173 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 
03:28:53.848647 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949331 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949373 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949471 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949534 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949553 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.949571 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2t5x\" (UniqueName: \"kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.950769 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.954450 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.955126 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.956060 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.964209 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:53 crc kubenswrapper[4953]: I1011 03:28:53.965316 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2t5x\" (UniqueName: \"kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-bvf55\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:54 crc kubenswrapper[4953]: I1011 03:28:54.104813 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:28:54 crc kubenswrapper[4953]: I1011 03:28:54.565642 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55"] Oct 11 03:28:54 crc kubenswrapper[4953]: I1011 03:28:54.687592 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" event={"ID":"5b2e7afe-517e-473b-b1f3-3ab040d6eca4","Type":"ContainerStarted","Data":"0de66d0e1b179eda9fc917d94b8edf9da50a4886b0d16815d1cc2300d655d3c0"} Oct 11 03:28:55 crc kubenswrapper[4953]: I1011 03:28:55.695771 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" event={"ID":"5b2e7afe-517e-473b-b1f3-3ab040d6eca4","Type":"ContainerStarted","Data":"6fd416c540cc24db6782451d3fb347bc706c68b7fdcd7a81b2b03d759d04ce91"} Oct 11 03:28:55 crc kubenswrapper[4953]: I1011 03:28:55.725099 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" podStartSLOduration=2.301817833 podStartE2EDuration="2.725079049s" podCreationTimestamp="2025-10-11 03:28:53 +0000 UTC" firstStartedPulling="2025-10-11 03:28:54.571782812 +0000 UTC m=+2545.504870456" lastFinishedPulling="2025-10-11 03:28:54.995044038 +0000 UTC m=+2545.928131672" observedRunningTime="2025-10-11 03:28:55.714717368 +0000 UTC m=+2546.647805042" watchObservedRunningTime="2025-10-11 03:28:55.725079049 +0000 UTC m=+2546.658166693" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.139278 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm"] Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.141045 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.151796 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm"] Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.189699 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.190874 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.235857 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djvrw\" (UniqueName: \"kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.236507 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.236703 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.337998 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.338373 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djvrw\" (UniqueName: \"kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.338502 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.339100 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume\") pod 
\"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.345220 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.366674 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djvrw\" (UniqueName: \"kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw\") pod \"collect-profiles-29335890-zgsbm\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.524507 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:00 crc kubenswrapper[4953]: I1011 03:30:00.957544 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm"] Oct 11 03:30:01 crc kubenswrapper[4953]: I1011 03:30:01.247070 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" event={"ID":"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d","Type":"ContainerStarted","Data":"c156590282ed987e0f2f27873088ecc14eea0edb96c3c7dd7e39852e8ac5e791"} Oct 11 03:30:01 crc kubenswrapper[4953]: I1011 03:30:01.247304 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" event={"ID":"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d","Type":"ContainerStarted","Data":"622751aa872f32bbdaf970ba00a9c9ef178df83a3197e05c80f5178bcfbf2d0f"} Oct 11 03:30:01 crc kubenswrapper[4953]: I1011 03:30:01.276404 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" podStartSLOduration=1.276383855 podStartE2EDuration="1.276383855s" podCreationTimestamp="2025-10-11 03:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:30:01.27024019 +0000 UTC m=+2612.203327854" watchObservedRunningTime="2025-10-11 03:30:01.276383855 +0000 UTC m=+2612.209471499" Oct 11 03:30:02 crc kubenswrapper[4953]: I1011 03:30:02.258279 4953 generic.go:334] "Generic (PLEG): container finished" podID="2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" containerID="c156590282ed987e0f2f27873088ecc14eea0edb96c3c7dd7e39852e8ac5e791" exitCode=0 Oct 11 03:30:02 crc kubenswrapper[4953]: I1011 03:30:02.258374 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" event={"ID":"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d","Type":"ContainerDied","Data":"c156590282ed987e0f2f27873088ecc14eea0edb96c3c7dd7e39852e8ac5e791"} Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.576912 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.695113 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume\") pod \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.695210 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume\") pod \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.695308 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djvrw\" (UniqueName: \"kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw\") pod \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\" (UID: \"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d\") " Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.697278 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume" (OuterVolumeSpecName: "config-volume") pod "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" (UID: "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.704816 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" (UID: "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.704966 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw" (OuterVolumeSpecName: "kube-api-access-djvrw") pod "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" (UID: "2fd8c8bf-6db8-4ad9-b5d3-2651b048232d"). InnerVolumeSpecName "kube-api-access-djvrw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.796963 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.797002 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:03 crc kubenswrapper[4953]: I1011 03:30:03.797018 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djvrw\" (UniqueName: \"kubernetes.io/projected/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d-kube-api-access-djvrw\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:04 crc kubenswrapper[4953]: I1011 03:30:04.284456 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" event={"ID":"2fd8c8bf-6db8-4ad9-b5d3-2651b048232d","Type":"ContainerDied","Data":"622751aa872f32bbdaf970ba00a9c9ef178df83a3197e05c80f5178bcfbf2d0f"} Oct 11 03:30:04 crc kubenswrapper[4953]: I1011 03:30:04.284493 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="622751aa872f32bbdaf970ba00a9c9ef178df83a3197e05c80f5178bcfbf2d0f" Oct 11 03:30:04 crc kubenswrapper[4953]: I1011 03:30:04.284528 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm" Oct 11 03:30:04 crc kubenswrapper[4953]: I1011 03:30:04.350004 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct"] Oct 11 03:30:04 crc kubenswrapper[4953]: I1011 03:30:04.365651 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335845-k5sct"] Oct 11 03:30:05 crc kubenswrapper[4953]: I1011 03:30:05.809374 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29cc960-b47b-494a-81bd-617e97aed612" path="/var/lib/kubelet/pods/a29cc960-b47b-494a-81bd-617e97aed612/volumes" Oct 11 03:30:06 crc kubenswrapper[4953]: I1011 03:30:06.306502 4953 generic.go:334] "Generic (PLEG): container finished" podID="5b2e7afe-517e-473b-b1f3-3ab040d6eca4" containerID="6fd416c540cc24db6782451d3fb347bc706c68b7fdcd7a81b2b03d759d04ce91" exitCode=0 Oct 11 03:30:06 crc kubenswrapper[4953]: I1011 03:30:06.306571 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" event={"ID":"5b2e7afe-517e-473b-b1f3-3ab040d6eca4","Type":"ContainerDied","Data":"6fd416c540cc24db6782451d3fb347bc706c68b7fdcd7a81b2b03d759d04ce91"} Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.731170 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878087 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2t5x\" (UniqueName: \"kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878179 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878252 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878386 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878419 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.878524 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.884049 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x" (OuterVolumeSpecName: "kube-api-access-q2t5x") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "kube-api-access-q2t5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.884290 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.884401 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph" (OuterVolumeSpecName: "ceph") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.905253 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory" (OuterVolumeSpecName: "inventory") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:30:07 crc kubenswrapper[4953]: E1011 03:30:07.906021 4953 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0 podName:5b2e7afe-517e-473b-b1f3-3ab040d6eca4 nodeName:}" failed. No retries permitted until 2025-10-11 03:30:08.40597305 +0000 UTC m=+2619.339060694 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovncontroller-config-0" (UniqueName: "kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4") : error deleting /var/lib/kubelet/pods/5b2e7afe-517e-473b-b1f3-3ab040d6eca4/volume-subpaths: remove /var/lib/kubelet/pods/5b2e7afe-517e-473b-b1f3-3ab040d6eca4/volume-subpaths: no such file or directory Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.908971 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.982059 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.982315 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.982410 4953 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.982497 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2t5x\" (UniqueName: \"kubernetes.io/projected/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-kube-api-access-q2t5x\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:07 crc kubenswrapper[4953]: I1011 03:30:07.982569 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.326019 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" event={"ID":"5b2e7afe-517e-473b-b1f3-3ab040d6eca4","Type":"ContainerDied","Data":"0de66d0e1b179eda9fc917d94b8edf9da50a4886b0d16815d1cc2300d655d3c0"} Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.326063 4953 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0de66d0e1b179eda9fc917d94b8edf9da50a4886b0d16815d1cc2300d655d3c0" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.326118 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-bvf55" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.408293 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd"] Oct 11 03:30:08 crc kubenswrapper[4953]: E1011 03:30:08.408742 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b2e7afe-517e-473b-b1f3-3ab040d6eca4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.408766 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b2e7afe-517e-473b-b1f3-3ab040d6eca4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 03:30:08 crc kubenswrapper[4953]: E1011 03:30:08.408805 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" containerName="collect-profiles" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.408815 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" containerName="collect-profiles" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.409046 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" containerName="collect-profiles" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.409080 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b2e7afe-517e-473b-b1f3-3ab040d6eca4" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.409834 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.412031 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.413044 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.418247 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd"] Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.491472 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") pod \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\" (UID: \"5b2e7afe-517e-473b-b1f3-3ab040d6eca4\") " Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492070 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "5b2e7afe-517e-473b-b1f3-3ab040d6eca4" (UID: "5b2e7afe-517e-473b-b1f3-3ab040d6eca4"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492446 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492567 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492712 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492821 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmzds\" (UniqueName: \"kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.492926 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.493055 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.493179 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.493362 4953 reconciler_common.go:293] "Volume detached for volume 
\"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5b2e7afe-517e-473b-b1f3-3ab040d6eca4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594301 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594582 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594644 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594667 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmzds\" (UniqueName: \"kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594691 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594721 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.594742 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.598679 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.598741 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.599105 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.599384 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.600861 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.601213 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.612656 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmzds\" (UniqueName: \"kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:08 crc kubenswrapper[4953]: I1011 03:30:08.726229 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:30:09 crc kubenswrapper[4953]: I1011 03:30:09.269297 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd"] Oct 11 03:30:09 crc kubenswrapper[4953]: W1011 03:30:09.273582 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17997509_4c59_4295_ac71_e5509fbf1425.slice/crio-85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9 WatchSource:0}: Error finding container 85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9: Status 404 returned error can't find the container with id 85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9 Oct 11 03:30:09 crc kubenswrapper[4953]: I1011 03:30:09.275963 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:30:09 crc kubenswrapper[4953]: I1011 03:30:09.333945 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" event={"ID":"17997509-4c59-4295-ac71-e5509fbf1425","Type":"ContainerStarted","Data":"85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9"} Oct 11 03:30:10 crc kubenswrapper[4953]: I1011 03:30:10.344819 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" event={"ID":"17997509-4c59-4295-ac71-e5509fbf1425","Type":"ContainerStarted","Data":"0d2d2ad2dbf5fbdf122cfbf8d3810bb2420118ad32a7cc5d0c55e692915d9361"} Oct 11 03:30:10 crc kubenswrapper[4953]: I1011 03:30:10.374390 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" podStartSLOduration=1.8177568339999999 podStartE2EDuration="2.374366494s" podCreationTimestamp="2025-10-11 03:30:08 +0000 UTC" firstStartedPulling="2025-10-11 03:30:09.275745912 +0000 UTC m=+2620.208833556" lastFinishedPulling="2025-10-11 03:30:09.832355572 +0000 UTC m=+2620.765443216" observedRunningTime="2025-10-11 03:30:10.370067646 +0000 UTC m=+2621.303155370" watchObservedRunningTime="2025-10-11 03:30:10.374366494 +0000 UTC m=+2621.307454158" Oct 11 03:30:49 crc kubenswrapper[4953]: I1011 03:30:49.905481 4953 scope.go:117] "RemoveContainer" containerID="488ce46fe2029dc1801f904b60647beeb54bf858b61e45f2a08105a49372a591" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.668442 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.671325 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.679633 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.839890 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.840053 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.840163 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h6bf\" (UniqueName: \"kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.941402 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h6bf\" (UniqueName: \"kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.941470 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.941554 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.942043 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.942110 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:06 crc kubenswrapper[4953]: I1011 03:31:06.987331 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2h6bf\" (UniqueName: \"kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf\") pod \"community-operators-75dh4\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:07 crc kubenswrapper[4953]: I1011 03:31:07.001550 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:07 crc kubenswrapper[4953]: I1011 03:31:07.495162 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:07 crc kubenswrapper[4953]: I1011 03:31:07.879967 4953 generic.go:334] "Generic (PLEG): container finished" podID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerID="68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c" exitCode=0 Oct 11 03:31:07 crc kubenswrapper[4953]: I1011 03:31:07.880083 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerDied","Data":"68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c"} Oct 11 03:31:07 crc kubenswrapper[4953]: I1011 03:31:07.881846 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerStarted","Data":"a0c741db0c8cdc040eabf0116ff909accc671d75de987c5537079e90bc335aea"} Oct 11 03:31:08 crc kubenswrapper[4953]: I1011 03:31:08.892735 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerStarted","Data":"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2"} Oct 11 03:31:09 crc kubenswrapper[4953]: I1011 03:31:09.921864 4953 generic.go:334] "Generic (PLEG): container finished" podID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerID="97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2" exitCode=0 Oct 11 03:31:09 crc kubenswrapper[4953]: I1011 03:31:09.922860 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerDied","Data":"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2"} Oct 11 03:31:09 crc kubenswrapper[4953]: I1011 03:31:09.931998 4953 generic.go:334] "Generic (PLEG): container finished" podID="17997509-4c59-4295-ac71-e5509fbf1425" containerID="0d2d2ad2dbf5fbdf122cfbf8d3810bb2420118ad32a7cc5d0c55e692915d9361" exitCode=0 Oct 11 03:31:09 crc kubenswrapper[4953]: I1011 03:31:09.932057 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" event={"ID":"17997509-4c59-4295-ac71-e5509fbf1425","Type":"ContainerDied","Data":"0d2d2ad2dbf5fbdf122cfbf8d3810bb2420118ad32a7cc5d0c55e692915d9361"} Oct 11 03:31:10 crc kubenswrapper[4953]: I1011 03:31:10.944747 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerStarted","Data":"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5"} Oct 11 03:31:10 crc kubenswrapper[4953]: I1011 03:31:10.973297 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-75dh4" podStartSLOduration=2.39894195 podStartE2EDuration="4.973277919s" podCreationTimestamp="2025-10-11 03:31:06 +0000 UTC" firstStartedPulling="2025-10-11 03:31:07.882781217 +0000 UTC m=+2678.815868861" lastFinishedPulling="2025-10-11 03:31:10.457117186 +0000 UTC m=+2681.390204830" observedRunningTime="2025-10-11 03:31:10.970320654 +0000 UTC m=+2681.903408308" watchObservedRunningTime="2025-10-11 03:31:10.973277919 +0000 UTC m=+2681.906365563" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.316278 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.316568 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.417712 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522340 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522439 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522483 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522558 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522629 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmzds\" (UniqueName: \"kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522681 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.522721 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory\") pod \"17997509-4c59-4295-ac71-e5509fbf1425\" (UID: \"17997509-4c59-4295-ac71-e5509fbf1425\") " Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.529933 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.530423 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds" (OuterVolumeSpecName: "kube-api-access-vmzds") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "kube-api-access-vmzds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.532038 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph" (OuterVolumeSpecName: "ceph") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.553523 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.554632 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.557359 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory" (OuterVolumeSpecName: "inventory") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.559367 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "17997509-4c59-4295-ac71-e5509fbf1425" (UID: "17997509-4c59-4295-ac71-e5509fbf1425"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625117 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625153 4953 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625166 4953 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625177 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625188 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmzds\" (UniqueName: \"kubernetes.io/projected/17997509-4c59-4295-ac71-e5509fbf1425-kube-api-access-vmzds\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625196 4953 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.625205 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/17997509-4c59-4295-ac71-e5509fbf1425-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.955182 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" event={"ID":"17997509-4c59-4295-ac71-e5509fbf1425","Type":"ContainerDied","Data":"85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9"} Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.955238 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85e3a59489e0961354d5fd8268c12b055c621f99fd9493f0be3e0e7fdad9bee9" Oct 11 03:31:11 crc kubenswrapper[4953]: I1011 03:31:11.955203 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.065530 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8"] Oct 11 03:31:12 crc kubenswrapper[4953]: E1011 03:31:12.066376 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17997509-4c59-4295-ac71-e5509fbf1425" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.066400 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="17997509-4c59-4295-ac71-e5509fbf1425" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.066724 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="17997509-4c59-4295-ac71-e5509fbf1425" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.067556 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.071183 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.073291 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.073936 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.073969 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.074057 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.074238 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.083665 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8"] Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.234840 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.234932 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.234994 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.235082 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgkjv\" (UniqueName: \"kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.235140 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.235194 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.336706 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.336764 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.336798 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.337016 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgkjv\" (UniqueName: \"kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.337047 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.337074 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.341149 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.341160 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.343240 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.345096 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.348481 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.357587 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgkjv\" (UniqueName: \"kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-fldp8\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.391516 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.742205 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8"] Oct 11 03:31:12 crc kubenswrapper[4953]: W1011 03:31:12.744891 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43a258fe_2816_4db2_9332_a340941a8b9b.slice/crio-e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc WatchSource:0}: Error finding container e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc: Status 404 returned error can't find the container with id e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc Oct 11 03:31:12 crc kubenswrapper[4953]: I1011 03:31:12.964589 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" event={"ID":"43a258fe-2816-4db2-9332-a340941a8b9b","Type":"ContainerStarted","Data":"e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc"} Oct 11 03:31:13 crc kubenswrapper[4953]: I1011 03:31:13.974573 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" event={"ID":"43a258fe-2816-4db2-9332-a340941a8b9b","Type":"ContainerStarted","Data":"49593b6b7a1206cbc6eb8934b1655972aa16d9441125c32a31200a08aed2a4f8"} Oct 11 03:31:13 crc kubenswrapper[4953]: I1011 03:31:13.998357 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" podStartSLOduration=1.371310103 podStartE2EDuration="1.998309003s" podCreationTimestamp="2025-10-11 03:31:12 +0000 UTC" firstStartedPulling="2025-10-11 03:31:12.746796905 +0000 UTC m=+2683.679884589" lastFinishedPulling="2025-10-11 03:31:13.373795835 +0000 UTC m=+2684.306883489" observedRunningTime="2025-10-11 03:31:13.98905691 +0000 UTC m=+2684.922144554" watchObservedRunningTime="2025-10-11 03:31:13.998309003 +0000 UTC m=+2684.931396647" Oct 11 03:31:17 crc kubenswrapper[4953]: I1011 03:31:17.002244 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:17 crc kubenswrapper[4953]: I1011 03:31:17.002798 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:17 crc kubenswrapper[4953]: I1011 03:31:17.072802 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:17 crc kubenswrapper[4953]: I1011 03:31:17.139670 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:17 crc kubenswrapper[4953]: I1011 03:31:17.322279 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.041250 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-75dh4" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="registry-server" containerID="cri-o://bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5" gracePeriod=2 Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.504735 4953 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.677984 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h6bf\" (UniqueName: \"kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf\") pod \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.678082 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities\") pod \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.678217 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content\") pod \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\" (UID: \"e07f8ef1-5073-43a9-9a93-7f733f0f0edf\") " Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.678836 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities" (OuterVolumeSpecName: "utilities") pod "e07f8ef1-5073-43a9-9a93-7f733f0f0edf" (UID: "e07f8ef1-5073-43a9-9a93-7f733f0f0edf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.685690 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf" (OuterVolumeSpecName: "kube-api-access-2h6bf") pod "e07f8ef1-5073-43a9-9a93-7f733f0f0edf" (UID: "e07f8ef1-5073-43a9-9a93-7f733f0f0edf"). InnerVolumeSpecName "kube-api-access-2h6bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.780032 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h6bf\" (UniqueName: \"kubernetes.io/projected/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-kube-api-access-2h6bf\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.780084 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.843470 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e07f8ef1-5073-43a9-9a93-7f733f0f0edf" (UID: "e07f8ef1-5073-43a9-9a93-7f733f0f0edf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:31:19 crc kubenswrapper[4953]: I1011 03:31:19.883378 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e07f8ef1-5073-43a9-9a93-7f733f0f0edf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.050319 4953 generic.go:334] "Generic (PLEG): container finished" podID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerID="bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5" exitCode=0 Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.050362 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerDied","Data":"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5"} Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.050390 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75dh4" event={"ID":"e07f8ef1-5073-43a9-9a93-7f733f0f0edf","Type":"ContainerDied","Data":"a0c741db0c8cdc040eabf0116ff909accc671d75de987c5537079e90bc335aea"} Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.050407 4953 scope.go:117] "RemoveContainer" containerID="bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.050521 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-75dh4" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.068822 4953 scope.go:117] "RemoveContainer" containerID="97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.083002 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.089767 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-75dh4"] Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.109871 4953 scope.go:117] "RemoveContainer" containerID="68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.147244 4953 scope.go:117] "RemoveContainer" containerID="bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5" Oct 11 03:31:20 crc kubenswrapper[4953]: E1011 03:31:20.147642 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5\": container with ID starting with bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5 not found: ID does not exist" containerID="bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.147688 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5"} err="failed to get container status \"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5\": rpc error: code = NotFound desc = could not find container \"bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5\": container with ID starting with bc64db194c8fea13122ebd5616fda7a03b14536505e2ec464f176ffc4706c3d5 not found: ID does not exist" Oct 11 
03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.147716 4953 scope.go:117] "RemoveContainer" containerID="97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2" Oct 11 03:31:20 crc kubenswrapper[4953]: E1011 03:31:20.148062 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2\": container with ID starting with 97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2 not found: ID does not exist" containerID="97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.148090 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2"} err="failed to get container status \"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2\": rpc error: code = NotFound desc = could not find container \"97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2\": container with ID starting with 97635abaafd989e012055ed48e9e1cf026c1d3f2e256b2d797d1b2b7e669ddb2 not found: ID does not exist" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.148111 4953 scope.go:117] "RemoveContainer" containerID="68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c" Oct 11 03:31:20 crc kubenswrapper[4953]: E1011 03:31:20.148313 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c\": container with ID starting with 68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c not found: ID does not exist" containerID="68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c" Oct 11 03:31:20 crc kubenswrapper[4953]: I1011 03:31:20.148334 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c"} err="failed to get container status \"68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c\": rpc error: code = NotFound desc = could not find container \"68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c\": container with ID starting with 68d82ff0a67e9f9137a7ef50b77f1b98fe79dfd5414616c23871c8d94e0d0c3c not found: ID does not exist" Oct 11 03:31:21 crc kubenswrapper[4953]: I1011 03:31:21.811562 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" path="/var/lib/kubelet/pods/e07f8ef1-5073-43a9-9a93-7f733f0f0edf/volumes" Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.461777 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"] Oct 11 03:31:36 crc kubenswrapper[4953]: E1011 03:31:36.462771 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="extract-utilities" Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.462790 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="extract-utilities" Oct 11 03:31:36 crc kubenswrapper[4953]: E1011 03:31:36.462813 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="extract-content" Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.462821 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="extract-content"
Oct 11 03:31:36 crc kubenswrapper[4953]: E1011 03:31:36.462833 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="registry-server"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.462841 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="registry-server"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.463066 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e07f8ef1-5073-43a9-9a93-7f733f0f0edf" containerName="registry-server"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.464501 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.474837 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"]
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.591751 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.592284 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.592370 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnp69\" (UniqueName: \"kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.693466 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.693587 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.693641 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnp69\" (UniqueName: \"kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.694154 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.694294 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.717121 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnp69\" (UniqueName: \"kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69\") pod \"certified-operators-mbr2l\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:36 crc kubenswrapper[4953]: I1011 03:31:36.789283 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbr2l"
Oct 11 03:31:37 crc kubenswrapper[4953]: I1011 03:31:37.296411 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"]
Oct 11 03:31:38 crc kubenswrapper[4953]: I1011 03:31:38.202485 4953 generic.go:334] "Generic (PLEG): container finished" podID="e67eb000-0402-484a-afab-18716e03b6a4" containerID="5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e" exitCode=0
Oct 11 03:31:38 crc kubenswrapper[4953]: I1011 03:31:38.202574 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerDied","Data":"5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e"}
Oct 11 03:31:38 crc kubenswrapper[4953]: I1011 03:31:38.202930 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerStarted","Data":"03c6ae8647e7af9b2c1ca1f50199d86b8eb0b6970b437b3d3173067d38ad6968"}
Oct 11 03:31:39 crc kubenswrapper[4953]: I1011 03:31:39.224201 4953 generic.go:334] "Generic (PLEG): container finished" podID="e67eb000-0402-484a-afab-18716e03b6a4" containerID="deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e" exitCode=0
Oct 11 03:31:39 crc kubenswrapper[4953]: I1011 03:31:39.224298 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerDied","Data":"deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e"}
Oct 11 03:31:40 crc kubenswrapper[4953]: I1011 03:31:40.234021 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerStarted","Data":"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e"}
Oct 11 03:31:40 crc kubenswrapper[4953]: I1011 03:31:40.258802 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mbr2l" podStartSLOduration=2.737631847 podStartE2EDuration="4.258784087s" podCreationTimestamp="2025-10-11 03:31:36 +0000 UTC" firstStartedPulling="2025-10-11 03:31:38.207061093 +0000 UTC m=+2709.140148747" lastFinishedPulling="2025-10-11 03:31:39.728213333 +0000 UTC m=+2710.661300987" observedRunningTime="2025-10-11 03:31:40.250595981 +0000 UTC m=+2711.183683645" watchObservedRunningTime="2025-10-11 03:31:40.258784087 +0000 UTC m=+2711.191871731"
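The startup-latency entry above encodes simple arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from that. A small sketch reproducing the logged values from the logged timestamps (the field reading is my interpretation, but the numbers check out):

    package main

    import (
        "fmt"
        "time"
    )

    // Layout matching the timestamps printed in the entry above.
    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
        t, err := time.Parse(layout, s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2025-10-11 03:31:36 +0000 UTC")
        firstPull := mustParse("2025-10-11 03:31:38.207061093 +0000 UTC")
        lastPull := mustParse("2025-10-11 03:31:39.728213333 +0000 UTC")
        running := mustParse("2025-10-11 03:31:40.258784087 +0000 UTC")

        e2e := running.Sub(created)          // podStartE2EDuration
        slo := e2e - lastPull.Sub(firstPull) // excludes image-pull time
        fmt.Println(e2e) // 4.258784087s, matching the log
        fmt.Println(slo) // 2.737631847s, matching podStartSLOduration
    }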
podStartE2EDuration="4.258784087s" podCreationTimestamp="2025-10-11 03:31:36 +0000 UTC" firstStartedPulling="2025-10-11 03:31:38.207061093 +0000 UTC m=+2709.140148747" lastFinishedPulling="2025-10-11 03:31:39.728213333 +0000 UTC m=+2710.661300987" observedRunningTime="2025-10-11 03:31:40.250595981 +0000 UTC m=+2711.183683645" watchObservedRunningTime="2025-10-11 03:31:40.258784087 +0000 UTC m=+2711.191871731" Oct 11 03:31:41 crc kubenswrapper[4953]: I1011 03:31:41.316428 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:31:41 crc kubenswrapper[4953]: I1011 03:31:41.316728 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:31:46 crc kubenswrapper[4953]: I1011 03:31:46.790231 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:46 crc kubenswrapper[4953]: I1011 03:31:46.790837 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:46 crc kubenswrapper[4953]: I1011 03:31:46.843632 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:47 crc kubenswrapper[4953]: I1011 03:31:47.389717 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:47 crc kubenswrapper[4953]: I1011 03:31:47.437420 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"] Oct 11 03:31:49 crc kubenswrapper[4953]: I1011 03:31:49.322307 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mbr2l" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="registry-server" containerID="cri-o://cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e" gracePeriod=2 Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.293577 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.355218 4953 generic.go:334] "Generic (PLEG): container finished" podID="e67eb000-0402-484a-afab-18716e03b6a4" containerID="cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e" exitCode=0 Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.355261 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerDied","Data":"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e"} Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.355287 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mbr2l" event={"ID":"e67eb000-0402-484a-afab-18716e03b6a4","Type":"ContainerDied","Data":"03c6ae8647e7af9b2c1ca1f50199d86b8eb0b6970b437b3d3173067d38ad6968"} Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.355307 4953 scope.go:117] "RemoveContainer" containerID="cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.355406 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mbr2l" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.357409 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content\") pod \"e67eb000-0402-484a-afab-18716e03b6a4\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.376875 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnp69\" (UniqueName: \"kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69\") pod \"e67eb000-0402-484a-afab-18716e03b6a4\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.377093 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities\") pod \"e67eb000-0402-484a-afab-18716e03b6a4\" (UID: \"e67eb000-0402-484a-afab-18716e03b6a4\") " Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.378004 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities" (OuterVolumeSpecName: "utilities") pod "e67eb000-0402-484a-afab-18716e03b6a4" (UID: "e67eb000-0402-484a-afab-18716e03b6a4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.380356 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.381454 4953 scope.go:117] "RemoveContainer" containerID="deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.384253 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69" (OuterVolumeSpecName: "kube-api-access-dnp69") pod "e67eb000-0402-484a-afab-18716e03b6a4" (UID: "e67eb000-0402-484a-afab-18716e03b6a4"). InnerVolumeSpecName "kube-api-access-dnp69". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.412212 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e67eb000-0402-484a-afab-18716e03b6a4" (UID: "e67eb000-0402-484a-afab-18716e03b6a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.449648 4953 scope.go:117] "RemoveContainer" containerID="5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.482086 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67eb000-0402-484a-afab-18716e03b6a4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.482121 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnp69\" (UniqueName: \"kubernetes.io/projected/e67eb000-0402-484a-afab-18716e03b6a4-kube-api-access-dnp69\") on node \"crc\" DevicePath \"\"" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.491001 4953 scope.go:117] "RemoveContainer" containerID="cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e" Oct 11 03:31:50 crc kubenswrapper[4953]: E1011 03:31:50.491708 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e\": container with ID starting with cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e not found: ID does not exist" containerID="cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.491772 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e"} err="failed to get container status \"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e\": rpc error: code = NotFound desc = could not find container \"cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e\": container with ID starting with cc6b82169b933e96519bbfd85deda9b2de25429c1b9b06a9ec73284024ae136e not found: ID does not exist" Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.491799 4953 scope.go:117] "RemoveContainer" containerID="deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e" Oct 11 
03:31:50 crc kubenswrapper[4953]: E1011 03:31:50.492171 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e\": container with ID starting with deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e not found: ID does not exist" containerID="deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e"
Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.492202 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e"} err="failed to get container status \"deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e\": rpc error: code = NotFound desc = could not find container \"deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e\": container with ID starting with deca16cd4dce7622e1a5f352908b96de2de85136a87d8a380e9d15bf0e86db4e not found: ID does not exist"
Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.492221 4953 scope.go:117] "RemoveContainer" containerID="5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e"
Oct 11 03:31:50 crc kubenswrapper[4953]: E1011 03:31:50.492478 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e\": container with ID starting with 5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e not found: ID does not exist" containerID="5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e"
Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.492502 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e"} err="failed to get container status \"5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e\": rpc error: code = NotFound desc = could not find container \"5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e\": container with ID starting with 5946614538ef2ba0dc5e67e792d9b50d512256b89ebe55e31d454b52d4103a3e not found: ID does not exist"
Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.698072 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"]
Oct 11 03:31:50 crc kubenswrapper[4953]: I1011 03:31:50.705508 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mbr2l"]
Oct 11 03:31:51 crc kubenswrapper[4953]: I1011 03:31:51.806646 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e67eb000-0402-484a-afab-18716e03b6a4" path="/var/lib/kubelet/pods/e67eb000-0402-484a-afab-18716e03b6a4/volumes"
Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.316159 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.316783 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
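The recurring "Probe failed" entries above are an HTTP liveness probe against http://127.0.0.1:8798/health getting connection-refused. A schematic prober loop under assumed settings (the 10s period and threshold of 3 are common defaults, not values read from this cluster's pod spec; the kubelet's real prober is considerably more involved):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe performs one HTTP liveness check, the way the entries above
    // show the kubelet probing the machine-config-daemon's health endpoint.
    func probe(url string) error {
        resp, err := http.Get(url)
        if err != nil {
            return err // e.g. "connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unexpected status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        const (
            period           = 10 * time.Second // assumed probe period
            failureThreshold = 3                // assumed threshold
        )
        failures := 0
        for range time.Tick(period) {
            if err := probe("http://127.0.0.1:8798/health"); err != nil {
                failures++
                fmt.Println("Probe failed:", err)
                if failures >= failureThreshold {
                    fmt.Println("container would be killed and restarted (liveness)")
                    failures = 0
                }
                continue
            }
            failures = 0 // consecutive failures reset on success
        }
    }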
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.316828 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.317580 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.317657 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c" gracePeriod=600 Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.559321 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c" exitCode=0 Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.559403 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c"} Oct 11 03:32:11 crc kubenswrapper[4953]: I1011 03:32:11.559728 4953 scope.go:117] "RemoveContainer" containerID="9acbde548850b0ef4d46ed582a0df6f89545b7900e919acd34aab94277d29cc4" Oct 11 03:32:12 crc kubenswrapper[4953]: I1011 03:32:12.571069 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922"} Oct 11 03:34:11 crc kubenswrapper[4953]: I1011 03:34:11.316652 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:34:11 crc kubenswrapper[4953]: I1011 03:34:11.317195 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:34:41 crc kubenswrapper[4953]: I1011 03:34:41.316507 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:34:41 crc kubenswrapper[4953]: I1011 03:34:41.317147 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:35:11 crc kubenswrapper[4953]: I1011 03:35:11.316185 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:35:11 crc kubenswrapper[4953]: I1011 03:35:11.316896 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:35:11 crc kubenswrapper[4953]: I1011 03:35:11.316960 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:35:11 crc kubenswrapper[4953]: I1011 03:35:11.317809 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:35:11 crc kubenswrapper[4953]: I1011 03:35:11.317896 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" gracePeriod=600 Oct 11 03:35:11 crc kubenswrapper[4953]: E1011 03:35:11.453794 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:35:12 crc kubenswrapper[4953]: I1011 03:35:12.160555 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" exitCode=0 Oct 11 03:35:12 crc kubenswrapper[4953]: I1011 03:35:12.160641 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922"} Oct 11 03:35:12 crc kubenswrapper[4953]: I1011 03:35:12.160974 4953 scope.go:117] "RemoveContainer" containerID="573e359637a61666d2b2bc5011f914b75dfb1cd4b6a8cd9c705dc96edf5e5c5c" Oct 11 03:35:12 crc kubenswrapper[4953]: I1011 03:35:12.161598 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:35:12 crc kubenswrapper[4953]: E1011 03:35:12.161923 4953 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:35:26 crc kubenswrapper[4953]: I1011 03:35:26.796712 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:35:26 crc kubenswrapper[4953]: E1011 03:35:26.797788 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:35:35 crc kubenswrapper[4953]: I1011 03:35:35.372621 4953 generic.go:334] "Generic (PLEG): container finished" podID="43a258fe-2816-4db2-9332-a340941a8b9b" containerID="49593b6b7a1206cbc6eb8934b1655972aa16d9441125c32a31200a08aed2a4f8" exitCode=0 Oct 11 03:35:35 crc kubenswrapper[4953]: I1011 03:35:35.372887 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" event={"ID":"43a258fe-2816-4db2-9332-a340941a8b9b","Type":"ContainerDied","Data":"49593b6b7a1206cbc6eb8934b1655972aa16d9441125c32a31200a08aed2a4f8"} Oct 11 03:35:36 crc kubenswrapper[4953]: I1011 03:35:36.833492 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034093 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key\") pod \"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034185 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0\") pod \"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034223 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph\") pod \"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034280 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle\") pod \"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034305 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgkjv\" (UniqueName: \"kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv\") pod 
\"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.034362 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory\") pod \"43a258fe-2816-4db2-9332-a340941a8b9b\" (UID: \"43a258fe-2816-4db2-9332-a340941a8b9b\") " Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.039951 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv" (OuterVolumeSpecName: "kube-api-access-xgkjv") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "kube-api-access-xgkjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.041868 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph" (OuterVolumeSpecName: "ceph") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.047709 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.061206 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory" (OuterVolumeSpecName: "inventory") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.074970 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.078441 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "43a258fe-2816-4db2-9332-a340941a8b9b" (UID: "43a258fe-2816-4db2-9332-a340941a8b9b"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.136513 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.136782 4953 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.136872 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.137031 4953 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.137116 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgkjv\" (UniqueName: \"kubernetes.io/projected/43a258fe-2816-4db2-9332-a340941a8b9b-kube-api-access-xgkjv\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.137195 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43a258fe-2816-4db2-9332-a340941a8b9b-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.390591 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" event={"ID":"43a258fe-2816-4db2-9332-a340941a8b9b","Type":"ContainerDied","Data":"e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc"} Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.390654 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7d7a64e8da0e93462b9314e53b5035bc4349c3dd0f585f396c65716261c21fc" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.390708 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-fldp8" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.505458 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj"] Oct 11 03:35:37 crc kubenswrapper[4953]: E1011 03:35:37.505951 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="registry-server" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.505968 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="registry-server" Oct 11 03:35:37 crc kubenswrapper[4953]: E1011 03:35:37.505980 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43a258fe-2816-4db2-9332-a340941a8b9b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.505987 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="43a258fe-2816-4db2-9332-a340941a8b9b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 03:35:37 crc kubenswrapper[4953]: E1011 03:35:37.506015 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="extract-content" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.506021 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="extract-content" Oct 11 03:35:37 crc kubenswrapper[4953]: E1011 03:35:37.506040 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="extract-utilities" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.506047 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="extract-utilities" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.507420 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="43a258fe-2816-4db2-9332-a340941a8b9b" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.507453 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e67eb000-0402-484a-afab-18716e03b6a4" containerName="registry-server" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.508278 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.513066 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.513265 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.513393 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.513582 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.514161 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rrncr" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.514424 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.516265 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.516404 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.516557 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.517558 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj"] Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.550928 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.550991 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551061 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551093 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551124 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551143 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551177 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551195 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncpq5\" (UniqueName: \"kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551243 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551282 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.551303 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 
crc kubenswrapper[4953]: I1011 03:35:37.653192 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653262 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653299 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653321 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653354 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653372 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncpq5\" (UniqueName: \"kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653417 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653459 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " 
pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653485 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653518 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.653558 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.655394 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.655623 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.657197 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.657249 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.657728 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.658275 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.659033 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.659912 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.659916 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.660153 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.678107 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncpq5\" (UniqueName: \"kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:37 crc kubenswrapper[4953]: I1011 03:35:37.827996 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:35:38 crc kubenswrapper[4953]: I1011 03:35:38.345029 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj"] Oct 11 03:35:38 crc kubenswrapper[4953]: I1011 03:35:38.353246 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:35:38 crc kubenswrapper[4953]: I1011 03:35:38.399195 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" event={"ID":"f779c8e1-e272-4dc8-b907-8b6e6ac836ef","Type":"ContainerStarted","Data":"0a4a3fe8e92a175d3a68f844b271bda260448734a51907cbfa09786e542ed66d"} Oct 11 03:35:39 crc kubenswrapper[4953]: I1011 03:35:39.409084 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" event={"ID":"f779c8e1-e272-4dc8-b907-8b6e6ac836ef","Type":"ContainerStarted","Data":"9c4914d485e4f2a9bcf3882103eb4ea49385924556f85af26f6978c0e7b8e4a2"} Oct 11 03:35:39 crc kubenswrapper[4953]: I1011 03:35:39.432414 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" podStartSLOduration=1.974132159 podStartE2EDuration="2.432395365s" podCreationTimestamp="2025-10-11 03:35:37 +0000 UTC" firstStartedPulling="2025-10-11 03:35:38.353026697 +0000 UTC m=+2949.286114341" lastFinishedPulling="2025-10-11 03:35:38.811289903 +0000 UTC m=+2949.744377547" observedRunningTime="2025-10-11 03:35:39.424771883 +0000 UTC m=+2950.357859557" watchObservedRunningTime="2025-10-11 03:35:39.432395365 +0000 UTC m=+2950.365483029" Oct 11 03:35:41 crc kubenswrapper[4953]: I1011 03:35:41.796008 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:35:41 crc kubenswrapper[4953]: E1011 03:35:41.796586 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:35:53 crc kubenswrapper[4953]: I1011 03:35:53.795315 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:35:53 crc kubenswrapper[4953]: E1011 03:35:53.796439 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:36:04 crc kubenswrapper[4953]: I1011 03:36:04.795627 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:36:04 crc kubenswrapper[4953]: E1011 03:36:04.796476 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.512923 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.515361 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.539456 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.623379 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49q4d\" (UniqueName: \"kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.623478 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.623541 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.724832 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49q4d\" (UniqueName: \"kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.724934 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.724991 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.725532 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content\") pod \"redhat-marketplace-44j7z\" (UID: 
\"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.726023 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.748233 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49q4d\" (UniqueName: \"kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d\") pod \"redhat-marketplace-44j7z\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:12 crc kubenswrapper[4953]: I1011 03:36:12.843167 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:13 crc kubenswrapper[4953]: I1011 03:36:13.336405 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:13 crc kubenswrapper[4953]: I1011 03:36:13.720046 4953 generic.go:334] "Generic (PLEG): container finished" podID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerID="16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e" exitCode=0 Oct 11 03:36:13 crc kubenswrapper[4953]: I1011 03:36:13.720107 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerDied","Data":"16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e"} Oct 11 03:36:13 crc kubenswrapper[4953]: I1011 03:36:13.720138 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerStarted","Data":"0a9f4900b295fe897385026bbb05f03718c2b362abc3c91daa32888ba2e73b94"} Oct 11 03:36:15 crc kubenswrapper[4953]: I1011 03:36:15.739697 4953 generic.go:334] "Generic (PLEG): container finished" podID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerID="d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0" exitCode=0 Oct 11 03:36:15 crc kubenswrapper[4953]: I1011 03:36:15.739796 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerDied","Data":"d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0"} Oct 11 03:36:16 crc kubenswrapper[4953]: I1011 03:36:16.757271 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerStarted","Data":"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a"} Oct 11 03:36:16 crc kubenswrapper[4953]: I1011 03:36:16.792031 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-44j7z" podStartSLOduration=2.364840604 podStartE2EDuration="4.792012411s" podCreationTimestamp="2025-10-11 03:36:12 +0000 UTC" firstStartedPulling="2025-10-11 03:36:13.722882948 +0000 UTC m=+2984.655970592" lastFinishedPulling="2025-10-11 03:36:16.150054755 +0000 UTC m=+2987.083142399" observedRunningTime="2025-10-11 03:36:16.785484287 
+0000 UTC m=+2987.718571941" watchObservedRunningTime="2025-10-11 03:36:16.792012411 +0000 UTC m=+2987.725100055" Oct 11 03:36:18 crc kubenswrapper[4953]: I1011 03:36:18.795326 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:36:18 crc kubenswrapper[4953]: E1011 03:36:18.795983 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:36:22 crc kubenswrapper[4953]: I1011 03:36:22.844417 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:22 crc kubenswrapper[4953]: I1011 03:36:22.844833 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:22 crc kubenswrapper[4953]: I1011 03:36:22.904431 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:23 crc kubenswrapper[4953]: I1011 03:36:23.867372 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:23 crc kubenswrapper[4953]: I1011 03:36:23.917407 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:25 crc kubenswrapper[4953]: I1011 03:36:25.832426 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-44j7z" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="registry-server" containerID="cri-o://881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a" gracePeriod=2 Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.263869 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.419340 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities\") pod \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.419530 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content\") pod \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.419567 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49q4d\" (UniqueName: \"kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d\") pod \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\" (UID: \"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35\") " Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.420596 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities" (OuterVolumeSpecName: "utilities") pod "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" (UID: "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.429001 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d" (OuterVolumeSpecName: "kube-api-access-49q4d") pod "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" (UID: "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35"). InnerVolumeSpecName "kube-api-access-49q4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.434894 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" (UID: "c5117c8e-20b4-438d-bfe4-9bc57b8cbe35"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.522496 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.522828 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.522842 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49q4d\" (UniqueName: \"kubernetes.io/projected/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35-kube-api-access-49q4d\") on node \"crc\" DevicePath \"\"" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.851482 4953 generic.go:334] "Generic (PLEG): container finished" podID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerID="881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a" exitCode=0 Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.851539 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerDied","Data":"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a"} Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.851569 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44j7z" event={"ID":"c5117c8e-20b4-438d-bfe4-9bc57b8cbe35","Type":"ContainerDied","Data":"0a9f4900b295fe897385026bbb05f03718c2b362abc3c91daa32888ba2e73b94"} Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.851610 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44j7z" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.851617 4953 scope.go:117] "RemoveContainer" containerID="881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.873352 4953 scope.go:117] "RemoveContainer" containerID="d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.892279 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.903815 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-44j7z"] Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.930028 4953 scope.go:117] "RemoveContainer" containerID="16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.963111 4953 scope.go:117] "RemoveContainer" containerID="881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a" Oct 11 03:36:26 crc kubenswrapper[4953]: E1011 03:36:26.963552 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a\": container with ID starting with 881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a not found: ID does not exist" containerID="881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.963596 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a"} err="failed to get container status \"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a\": rpc error: code = NotFound desc = could not find container \"881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a\": container with ID starting with 881fdccfc18fa2dcac97b9731d7e99fcb8d4f9b0b031a5d4b2c899416205795a not found: ID does not exist" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.963649 4953 scope.go:117] "RemoveContainer" containerID="d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0" Oct 11 03:36:26 crc kubenswrapper[4953]: E1011 03:36:26.964524 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0\": container with ID starting with d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0 not found: ID does not exist" containerID="d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.964569 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0"} err="failed to get container status \"d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0\": rpc error: code = NotFound desc = could not find container \"d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0\": container with ID starting with d350b53c2f179d0caeebdebbfff5e173ae6c1e807155df046b8bb6b62a2921f0 not found: ID does not exist" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.964596 4953 scope.go:117] "RemoveContainer" 
containerID="16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e" Oct 11 03:36:26 crc kubenswrapper[4953]: E1011 03:36:26.965285 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e\": container with ID starting with 16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e not found: ID does not exist" containerID="16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e" Oct 11 03:36:26 crc kubenswrapper[4953]: I1011 03:36:26.965321 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e"} err="failed to get container status \"16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e\": rpc error: code = NotFound desc = could not find container \"16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e\": container with ID starting with 16563a8dd568b81cdbb49a948b1791b79804bc1e9cb8c1aab43a95288eac220e not found: ID does not exist" Oct 11 03:36:27 crc kubenswrapper[4953]: I1011 03:36:27.806690 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" path="/var/lib/kubelet/pods/c5117c8e-20b4-438d-bfe4-9bc57b8cbe35/volumes" Oct 11 03:36:29 crc kubenswrapper[4953]: I1011 03:36:29.803528 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:36:29 crc kubenswrapper[4953]: E1011 03:36:29.807826 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:36:44 crc kubenswrapper[4953]: I1011 03:36:44.795663 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:36:44 crc kubenswrapper[4953]: E1011 03:36:44.796466 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:36:59 crc kubenswrapper[4953]: I1011 03:36:59.800979 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:36:59 crc kubenswrapper[4953]: E1011 03:36:59.801811 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.800914 4953 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:06 crc kubenswrapper[4953]: E1011 03:37:06.801892 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="registry-server" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.801907 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="registry-server" Oct 11 03:37:06 crc kubenswrapper[4953]: E1011 03:37:06.801919 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="extract-content" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.801925 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="extract-content" Oct 11 03:37:06 crc kubenswrapper[4953]: E1011 03:37:06.801941 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="extract-utilities" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.801947 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="extract-utilities" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.802225 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5117c8e-20b4-438d-bfe4-9bc57b8cbe35" containerName="registry-server" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.805489 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.817819 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.888222 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.888405 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thsz7\" (UniqueName: \"kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.888485 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.990793 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.990901 4953 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-thsz7\" (UniqueName: \"kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.990953 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.991505 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:06 crc kubenswrapper[4953]: I1011 03:37:06.991774 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:07 crc kubenswrapper[4953]: I1011 03:37:07.027155 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thsz7\" (UniqueName: \"kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7\") pod \"redhat-operators-prbnp\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:07 crc kubenswrapper[4953]: I1011 03:37:07.123061 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:07 crc kubenswrapper[4953]: I1011 03:37:07.628123 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:08 crc kubenswrapper[4953]: I1011 03:37:08.200080 4953 generic.go:334] "Generic (PLEG): container finished" podID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerID="b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7" exitCode=0 Oct 11 03:37:08 crc kubenswrapper[4953]: I1011 03:37:08.200129 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerDied","Data":"b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7"} Oct 11 03:37:08 crc kubenswrapper[4953]: I1011 03:37:08.200153 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerStarted","Data":"39f128417b556584434fcf84e26d3a1940b8dd3619d49c07914009e0e74366a4"} Oct 11 03:37:10 crc kubenswrapper[4953]: I1011 03:37:10.218967 4953 generic.go:334] "Generic (PLEG): container finished" podID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerID="d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3" exitCode=0 Oct 11 03:37:10 crc kubenswrapper[4953]: I1011 03:37:10.219010 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerDied","Data":"d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3"} Oct 11 03:37:11 crc kubenswrapper[4953]: I1011 03:37:11.257339 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerStarted","Data":"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5"} Oct 11 03:37:11 crc kubenswrapper[4953]: I1011 03:37:11.288709 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-prbnp" podStartSLOduration=2.872055222 podStartE2EDuration="5.288690504s" podCreationTimestamp="2025-10-11 03:37:06 +0000 UTC" firstStartedPulling="2025-10-11 03:37:08.202551763 +0000 UTC m=+3039.135639407" lastFinishedPulling="2025-10-11 03:37:10.619187045 +0000 UTC m=+3041.552274689" observedRunningTime="2025-10-11 03:37:11.28135436 +0000 UTC m=+3042.214442014" watchObservedRunningTime="2025-10-11 03:37:11.288690504 +0000 UTC m=+3042.221778138" Oct 11 03:37:12 crc kubenswrapper[4953]: I1011 03:37:12.795140 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:37:12 crc kubenswrapper[4953]: E1011 03:37:12.795626 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:37:17 crc kubenswrapper[4953]: I1011 03:37:17.123597 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:17 crc 
kubenswrapper[4953]: I1011 03:37:17.124004 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:17 crc kubenswrapper[4953]: I1011 03:37:17.179786 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:17 crc kubenswrapper[4953]: I1011 03:37:17.368185 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:17 crc kubenswrapper[4953]: I1011 03:37:17.426933 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.327649 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-prbnp" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="registry-server" containerID="cri-o://6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5" gracePeriod=2 Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.754594 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.937525 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities\") pod \"ebf8503b-5220-46f5-adff-f251cbe4a017\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.938033 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content\") pod \"ebf8503b-5220-46f5-adff-f251cbe4a017\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.938198 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thsz7\" (UniqueName: \"kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7\") pod \"ebf8503b-5220-46f5-adff-f251cbe4a017\" (UID: \"ebf8503b-5220-46f5-adff-f251cbe4a017\") " Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.938542 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities" (OuterVolumeSpecName: "utilities") pod "ebf8503b-5220-46f5-adff-f251cbe4a017" (UID: "ebf8503b-5220-46f5-adff-f251cbe4a017"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.939021 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:37:19 crc kubenswrapper[4953]: I1011 03:37:19.943113 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7" (OuterVolumeSpecName: "kube-api-access-thsz7") pod "ebf8503b-5220-46f5-adff-f251cbe4a017" (UID: "ebf8503b-5220-46f5-adff-f251cbe4a017"). InnerVolumeSpecName "kube-api-access-thsz7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.041576 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thsz7\" (UniqueName: \"kubernetes.io/projected/ebf8503b-5220-46f5-adff-f251cbe4a017-kube-api-access-thsz7\") on node \"crc\" DevicePath \"\"" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.341456 4953 generic.go:334] "Generic (PLEG): container finished" podID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerID="6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5" exitCode=0 Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.341508 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerDied","Data":"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5"} Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.341544 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-prbnp" event={"ID":"ebf8503b-5220-46f5-adff-f251cbe4a017","Type":"ContainerDied","Data":"39f128417b556584434fcf84e26d3a1940b8dd3619d49c07914009e0e74366a4"} Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.341574 4953 scope.go:117] "RemoveContainer" containerID="6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.341595 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-prbnp" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.363912 4953 scope.go:117] "RemoveContainer" containerID="d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.426145 4953 scope.go:117] "RemoveContainer" containerID="b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.453765 4953 scope.go:117] "RemoveContainer" containerID="6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5" Oct 11 03:37:20 crc kubenswrapper[4953]: E1011 03:37:20.454218 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5\": container with ID starting with 6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5 not found: ID does not exist" containerID="6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.454266 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5"} err="failed to get container status \"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5\": rpc error: code = NotFound desc = could not find container \"6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5\": container with ID starting with 6077eda08a40c06b38d05f6d1efbe87f5760fb983a1258d384aa383bdb552ff5 not found: ID does not exist" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.454299 4953 scope.go:117] "RemoveContainer" containerID="d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3" Oct 11 03:37:20 crc kubenswrapper[4953]: E1011 03:37:20.455000 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3\": container with ID starting with d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3 not found: ID does not exist" containerID="d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.455032 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3"} err="failed to get container status \"d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3\": rpc error: code = NotFound desc = could not find container \"d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3\": container with ID starting with d32746d12ecb02f789d8923ac6257a4d098df1b49a1f74a78b7283b48a8a5ee3 not found: ID does not exist" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.455057 4953 scope.go:117] "RemoveContainer" containerID="b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7" Oct 11 03:37:20 crc kubenswrapper[4953]: E1011 03:37:20.455960 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7\": container with ID starting with b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7 not found: ID does not exist" containerID="b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7" Oct 11 03:37:20 crc kubenswrapper[4953]: I1011 03:37:20.456000 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7"} err="failed to get container status \"b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7\": rpc error: code = NotFound desc = could not find container \"b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7\": container with ID starting with b3920fd536bc0a589c26c77f30db199535ee5f9f91374806e4dafdb1f5c538c7 not found: ID does not exist" Oct 11 03:37:22 crc kubenswrapper[4953]: I1011 03:37:22.213368 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebf8503b-5220-46f5-adff-f251cbe4a017" (UID: "ebf8503b-5220-46f5-adff-f251cbe4a017"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:37:22 crc kubenswrapper[4953]: I1011 03:37:22.283834 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebf8503b-5220-46f5-adff-f251cbe4a017-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:37:22 crc kubenswrapper[4953]: I1011 03:37:22.478079 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:22 crc kubenswrapper[4953]: I1011 03:37:22.487177 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-prbnp"] Oct 11 03:37:23 crc kubenswrapper[4953]: I1011 03:37:23.807276 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" path="/var/lib/kubelet/pods/ebf8503b-5220-46f5-adff-f251cbe4a017/volumes" Oct 11 03:37:27 crc kubenswrapper[4953]: I1011 03:37:27.795526 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:37:27 crc kubenswrapper[4953]: E1011 03:37:27.796276 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:37:39 crc kubenswrapper[4953]: I1011 03:37:39.802378 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:37:39 crc kubenswrapper[4953]: E1011 03:37:39.803167 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:37:51 crc kubenswrapper[4953]: I1011 03:37:51.795548 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:37:51 crc kubenswrapper[4953]: E1011 03:37:51.796478 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:38:05 crc kubenswrapper[4953]: I1011 03:38:05.795365 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:38:05 crc kubenswrapper[4953]: E1011 03:38:05.796156 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:38:19 crc kubenswrapper[4953]: I1011 03:38:19.800530 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:38:19 crc kubenswrapper[4953]: E1011 03:38:19.801299 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:38:30 crc kubenswrapper[4953]: I1011 03:38:30.795247 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:38:30 crc kubenswrapper[4953]: E1011 03:38:30.796139 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:38:43 crc kubenswrapper[4953]: I1011 03:38:43.795340 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:38:43 crc kubenswrapper[4953]: E1011 03:38:43.796489 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:38:57 crc kubenswrapper[4953]: I1011 03:38:57.795707 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:38:57 crc kubenswrapper[4953]: E1011 03:38:57.796581 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:39:10 crc kubenswrapper[4953]: I1011 03:39:10.795371 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:39:10 crc kubenswrapper[4953]: E1011 03:39:10.796154 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:39:23 crc kubenswrapper[4953]: I1011 03:39:23.532342 4953 
generic.go:334] "Generic (PLEG): container finished" podID="f779c8e1-e272-4dc8-b907-8b6e6ac836ef" containerID="9c4914d485e4f2a9bcf3882103eb4ea49385924556f85af26f6978c0e7b8e4a2" exitCode=0 Oct 11 03:39:23 crc kubenswrapper[4953]: I1011 03:39:23.532472 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" event={"ID":"f779c8e1-e272-4dc8-b907-8b6e6ac836ef","Type":"ContainerDied","Data":"9c4914d485e4f2a9bcf3882103eb4ea49385924556f85af26f6978c0e7b8e4a2"} Oct 11 03:39:23 crc kubenswrapper[4953]: I1011 03:39:23.795419 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:39:23 crc kubenswrapper[4953]: E1011 03:39:23.795934 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:39:24 crc kubenswrapper[4953]: I1011 03:39:24.928459 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043120 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043163 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043252 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043291 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043319 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043340 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncpq5\" (UniqueName: \"kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5\") pod 
\"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043372 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043442 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043473 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043555 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.043592 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle\") pod \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\" (UID: \"f779c8e1-e272-4dc8-b907-8b6e6ac836ef\") " Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.048835 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.049791 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5" (OuterVolumeSpecName: "kube-api-access-ncpq5") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "kube-api-access-ncpq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.050050 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph" (OuterVolumeSpecName: "ceph") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.072161 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.072732 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.073453 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.074224 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.077220 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory" (OuterVolumeSpecName: "inventory") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.081249 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.085789 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.091810 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f779c8e1-e272-4dc8-b907-8b6e6ac836ef" (UID: "f779c8e1-e272-4dc8-b907-8b6e6ac836ef"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.145972 4953 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146052 4953 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146065 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncpq5\" (UniqueName: \"kubernetes.io/projected/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-kube-api-access-ncpq5\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146078 4953 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146093 4953 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-inventory\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146103 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146114 4953 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146125 4953 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146137 4953 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146148 4953 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.146158 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f779c8e1-e272-4dc8-b907-8b6e6ac836ef-ceph\") on node \"crc\" DevicePath 
\"\"" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.551803 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" event={"ID":"f779c8e1-e272-4dc8-b907-8b6e6ac836ef","Type":"ContainerDied","Data":"0a4a3fe8e92a175d3a68f844b271bda260448734a51907cbfa09786e542ed66d"} Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.551844 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a4a3fe8e92a175d3a68f844b271bda260448734a51907cbfa09786e542ed66d" Oct 11 03:39:25 crc kubenswrapper[4953]: I1011 03:39:25.551864 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj" Oct 11 03:39:36 crc kubenswrapper[4953]: I1011 03:39:36.794871 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:39:36 crc kubenswrapper[4953]: E1011 03:39:36.795632 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.322990 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 11 03:39:39 crc kubenswrapper[4953]: E1011 03:39:39.323536 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="registry-server" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.323549 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="registry-server" Oct 11 03:39:39 crc kubenswrapper[4953]: E1011 03:39:39.323571 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="extract-utilities" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.323577 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="extract-utilities" Oct 11 03:39:39 crc kubenswrapper[4953]: E1011 03:39:39.323594 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="extract-content" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.323617 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebf8503b-5220-46f5-adff-f251cbe4a017" containerName="extract-content" Oct 11 03:39:39 crc kubenswrapper[4953]: E1011 03:39:39.323626 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f779c8e1-e272-4dc8-b907-8b6e6ac836ef" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.323634 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f779c8e1-e272-4dc8-b907-8b6e6ac836ef" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.323789 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f779c8e1-e272-4dc8-b907-8b6e6ac836ef" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 11 03:39:39 crc 
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.324710 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.329338 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.329354 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.346448 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"]
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444735 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444802 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444833 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444852 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444873 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-dev\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0"
Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444899 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") "
pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444926 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-run\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444942 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pck7h\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-kube-api-access-pck7h\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444967 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-sys\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.444984 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.445001 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.445030 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.445179 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.445292 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.445320 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " 
pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.461858 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.463515 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.468795 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.478248 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546359 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546407 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546426 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546453 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546480 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-sys\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546520 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546590 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546639 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546658 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546676 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546695 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546720 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546739 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-dev\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546781 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546810 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546839 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4cns\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-kube-api-access-b4cns\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546886 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-run\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " 
pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546908 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pck7h\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-kube-api-access-pck7h\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546932 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546957 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-run\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.546994 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-sys\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547024 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547050 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547077 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547098 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547123 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-dev\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547149 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547186 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547228 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-scripts\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547253 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547283 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-ceph\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547305 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547331 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-lib-modules\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547504 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547535 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-sys\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547545 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc 
kubenswrapper[4953]: I1011 03:39:39.547711 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-dev\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547737 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547761 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547826 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-run\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.547965 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.548204 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.552006 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.552464 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.552897 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.553404 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data\") pod \"cinder-volume-volume1-0\" (UID: 
\"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.557080 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.563222 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pck7h\" (UniqueName: \"kubernetes.io/projected/a32c6695-97c7-4d7a-86f7-7e1a6d736e56-kube-api-access-pck7h\") pod \"cinder-volume-volume1-0\" (UID: \"a32c6695-97c7-4d7a-86f7-7e1a6d736e56\") " pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.641777 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649344 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649408 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-scripts\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649435 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-ceph\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649460 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-lib-modules\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649484 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649506 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-sys\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649502 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649519 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649662 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649730 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649751 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649848 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649895 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4cns\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-kube-api-access-b4cns\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649938 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.649985 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-run\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.650079 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.650119 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-dev\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc 
kubenswrapper[4953]: I1011 03:39:39.650260 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-dev\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.650302 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.650264 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-lib-modules\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.653872 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-scripts\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.653982 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-run\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.653947 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.653967 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-sys\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.653912 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.654165 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.654209 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0cb94931-13cd-42d6-89e5-0862980caa67-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.657315 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.660042 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.670375 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0cb94931-13cd-42d6-89e5-0862980caa67-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.671269 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-ceph\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.672438 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4cns\" (UniqueName: \"kubernetes.io/projected/0cb94931-13cd-42d6-89e5-0862980caa67-kube-api-access-b4cns\") pod \"cinder-backup-0\" (UID: \"0cb94931-13cd-42d6-89e5-0862980caa67\") " pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.781050 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.932600 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-dm7jz"] Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.933634 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:39 crc kubenswrapper[4953]: I1011 03:39:39.945436 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-dm7jz"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.057769 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjm44\" (UniqueName: \"kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44\") pod \"manila-db-create-dm7jz\" (UID: \"b3470028-8d0f-4e6a-8c32-684343626ece\") " pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.160118 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjm44\" (UniqueName: \"kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44\") pod \"manila-db-create-dm7jz\" (UID: \"b3470028-8d0f-4e6a-8c32-684343626ece\") " pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.166087 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.169717 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.173585 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-nnnpn" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.173725 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.179959 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.181133 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.182714 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.187847 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjm44\" (UniqueName: \"kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44\") pod \"manila-db-create-dm7jz\" (UID: \"b3470028-8d0f-4e6a-8c32-684343626ece\") " pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.217412 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.219790 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.223745 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-87rn5" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.224150 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.224152 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.232804 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.258643 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262648 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262727 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262751 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262782 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262810 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnt6z\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262876 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262919 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262968 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.262984 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263002 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263032 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263049 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2fp8\" (UniqueName: \"kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263071 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263096 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.263872 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.265559 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.298143 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.323900 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.337297 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.339047 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.342242 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.342452 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.366775 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.366968 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367057 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367134 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367225 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367298 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367382 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367465 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjd2m\" (UniqueName: \"kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " 
pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367548 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367646 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367727 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367800 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367868 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.367947 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.368017 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.368486 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2fp8\" (UniqueName: \"kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.368680 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.368637 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.368941 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6ljq\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369023 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369099 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369194 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369273 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369680 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369776 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.369845 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.370259 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.370419 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.370876 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.381922 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.382045 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.382102 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnt6z\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.396414 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.385885 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.413901 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.416970 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2fp8\" (UniqueName: \"kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8\") pod \"horizon-6888d5bb97-swtpx\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " 
pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.428764 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.475484 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: E1011 03:39:40.490276 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceph combined-ca-bundle config-data glance httpd-run kube-api-access-j6ljq logs public-tls-certs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-external-api-0" podUID="a6109815-b1b7-4a03-aaa5-3980017b06d2" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.490463 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.495408 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.498515 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.501173 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnt6z\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503184 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503629 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503704 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6ljq\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503786 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503840 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503865 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503909 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503946 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.503991 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504011 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 
03:39:40.504055 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504079 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504141 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjd2m\" (UniqueName: \"kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504178 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504211 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504882 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.504205 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.505774 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.506157 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.506270 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.506347 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.506678 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.508534 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.509341 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.509510 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.512388 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.512518 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.513809 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.514054 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " 
pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.519525 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.521395 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6ljq\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.525489 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjd2m\" (UniqueName: \"kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m\") pod \"horizon-67f7c8b487-cv6l8\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.550222 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.581682 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.603309 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.611396 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.640897 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.784674 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"0cb94931-13cd-42d6-89e5-0862980caa67","Type":"ContainerStarted","Data":"71f6aa0d1058dd32a2040372ad470bb31a4e99517ce624ee37cb1eb4f8af02b8"} Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.803448 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.804047 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"a32c6695-97c7-4d7a-86f7-7e1a6d736e56","Type":"ContainerStarted","Data":"f20cdec5608cc942a504655537e6eafde0007ef25a9ef991f7661520a2ebe3d1"} Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.882148 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.924877 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.924940 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6ljq\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.924971 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925009 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925041 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925072 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925290 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925406 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.925429 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle\") pod \"a6109815-b1b7-4a03-aaa5-3980017b06d2\" (UID: \"a6109815-b1b7-4a03-aaa5-3980017b06d2\") " Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.926302 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.926727 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs" (OuterVolumeSpecName: "logs") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.932524 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.932574 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts" (OuterVolumeSpecName: "scripts") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.932615 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.932627 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data" (OuterVolumeSpecName: "config-data") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.934746 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph" (OuterVolumeSpecName: "ceph") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.944818 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.960093 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq" (OuterVolumeSpecName: "kube-api-access-j6ljq") pod "a6109815-b1b7-4a03-aaa5-3980017b06d2" (UID: "a6109815-b1b7-4a03-aaa5-3980017b06d2"). InnerVolumeSpecName "kube-api-access-j6ljq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:40 crc kubenswrapper[4953]: I1011 03:39:40.975758 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-dm7jz"] Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028835 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028877 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028915 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028927 4953 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028939 4953 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028951 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028960 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a6109815-b1b7-4a03-aaa5-3980017b06d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028970 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6ljq\" (UniqueName: \"kubernetes.io/projected/a6109815-b1b7-4a03-aaa5-3980017b06d2-kube-api-access-j6ljq\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.028980 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6109815-b1b7-4a03-aaa5-3980017b06d2-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.056215 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.130910 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.263092 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:39:41 crc kubenswrapper[4953]: W1011 03:39:41.345768 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d4bc2f5_7862_4a70_9acd_05003ce96228.slice/crio-2f841f84737c3c78526be607b472b725e7258e68291ac6b133b0a6a963357382 WatchSource:0}: Error finding container 
2f841f84737c3c78526be607b472b725e7258e68291ac6b133b0a6a963357382: Status 404 returned error can't find the container with id 2f841f84737c3c78526be607b472b725e7258e68291ac6b133b0a6a963357382 Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.422265 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.449442 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.849215 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerStarted","Data":"0ed7a6960d3514bf97b9dc2fd671f16e2c465c7594213a8e951296fc4cd80e52"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.851251 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerStarted","Data":"4d746e3c039ed655040c0502fb2f0cdc15934bc0a1942bf417c2d2ed05fe052f"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.858054 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"a32c6695-97c7-4d7a-86f7-7e1a6d736e56","Type":"ContainerStarted","Data":"79b2197ee672a162bbf317fc599673658a0b8a61d47eff3fa13d7addaa4ad70d"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.864209 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerStarted","Data":"2f841f84737c3c78526be607b472b725e7258e68291ac6b133b0a6a963357382"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.866143 4953 generic.go:334] "Generic (PLEG): container finished" podID="b3470028-8d0f-4e6a-8c32-684343626ece" containerID="edc830828377f6faa1c8b84385738117becf7071d2295e580adf7ce6e902e100" exitCode=0 Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.866188 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dm7jz" event={"ID":"b3470028-8d0f-4e6a-8c32-684343626ece","Type":"ContainerDied","Data":"edc830828377f6faa1c8b84385738117becf7071d2295e580adf7ce6e902e100"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.866205 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dm7jz" event={"ID":"b3470028-8d0f-4e6a-8c32-684343626ece","Type":"ContainerStarted","Data":"ff71fea00227913d345dc78069bdf05c1ae82db81bb694b13725680eddbdad55"} Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.866223 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.974642 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:41 crc kubenswrapper[4953]: I1011 03:39:41.983286 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.006177 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.012245 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.017284 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.018897 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.027754 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054132 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054178 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054213 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054253 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054277 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh4pz\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054307 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054352 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054389 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.054409 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.156455 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.156888 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.156923 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.156968 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.157010 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.157048 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.157115 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.157147 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh4pz\" (UniqueName: 
\"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.157186 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.158090 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.158283 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.160095 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.163048 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.164324 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.165337 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.180567 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.183235 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.210712 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh4pz\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.239540 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.363421 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.607425 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.630349 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.631979 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.636172 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.662700 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.683275 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.712794 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715037 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715436 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vv7q\" (UniqueName: \"kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715482 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715554 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715593 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715706 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.715746 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.747078 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d5ffc79dd-kqmlf"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.748506 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.751670 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5ffc79dd-kqmlf"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.758187 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819266 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819347 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-secret-key\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819376 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819460 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") 
" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819478 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-combined-ca-bundle\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819527 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vv7q\" (UniqueName: \"kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819543 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-config-data\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819625 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819648 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-scripts\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819674 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49q7m\" (UniqueName: \"kubernetes.io/projected/d9aab956-be2a-4013-9071-a9cfb31c7da2-kube-api-access-49q7m\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819730 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819746 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9aab956-be2a-4013-9071-a9cfb31c7da2-logs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819798 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 
03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.819834 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-tls-certs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.820393 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.826006 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.828255 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.833222 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.834650 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.847282 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vv7q\" (UniqueName: \"kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.847490 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle\") pod \"horizon-597b9665b8-kw4fw\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") " pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.880335 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"0cb94931-13cd-42d6-89e5-0862980caa67","Type":"ContainerStarted","Data":"adf98a4fd599c82eb5a1b76bd73333cca9c4b92df1b9648c579321bf870f93de"} Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.880405 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" 
event={"ID":"0cb94931-13cd-42d6-89e5-0862980caa67","Type":"ContainerStarted","Data":"31f77f93d9e6ebb8347957474a6e477a947ff9f0bb2ddaefb7e6172904222b53"} Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.882905 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerStarted","Data":"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652"} Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.903753 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"a32c6695-97c7-4d7a-86f7-7e1a6d736e56","Type":"ContainerStarted","Data":"86c24a8187d9a0b4539ebca020d4ffb8fde1d1eb63fc45896beb2402612accea"} Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926695 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-combined-ca-bundle\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926748 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-config-data\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926783 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-scripts\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926810 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49q7m\" (UniqueName: \"kubernetes.io/projected/d9aab956-be2a-4013-9071-a9cfb31c7da2-kube-api-access-49q7m\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926875 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9aab956-be2a-4013-9071-a9cfb31c7da2-logs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926931 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-tls-certs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.926983 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-secret-key\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.930340 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9aab956-be2a-4013-9071-a9cfb31c7da2-logs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.931369 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-config-data\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.931422 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d9aab956-be2a-4013-9071-a9cfb31c7da2-scripts\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.949093 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-tls-certs\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.949942 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-combined-ca-bundle\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.966992 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.91862695 podStartE2EDuration="3.966971938s" podCreationTimestamp="2025-10-11 03:39:39 +0000 UTC" firstStartedPulling="2025-10-11 03:39:40.658803783 +0000 UTC m=+3191.591891427" lastFinishedPulling="2025-10-11 03:39:41.707148771 +0000 UTC m=+3192.640236415" observedRunningTime="2025-10-11 03:39:42.920678584 +0000 UTC m=+3193.853766238" watchObservedRunningTime="2025-10-11 03:39:42.966971938 +0000 UTC m=+3193.900059582" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.968484 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d9aab956-be2a-4013-9071-a9cfb31c7da2-horizon-secret-key\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.976145 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49q7m\" (UniqueName: \"kubernetes.io/projected/d9aab956-be2a-4013-9071-a9cfb31c7da2-kube-api-access-49q7m\") pod \"horizon-d5ffc79dd-kqmlf\" (UID: \"d9aab956-be2a-4013-9071-a9cfb31c7da2\") " pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.994496 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:39:42 crc kubenswrapper[4953]: I1011 03:39:42.997508 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.030526205 podStartE2EDuration="3.997497006s" podCreationTimestamp="2025-10-11 03:39:39 +0000 UTC" firstStartedPulling="2025-10-11 03:39:40.41733531 +0000 UTC m=+3191.350422944" lastFinishedPulling="2025-10-11 03:39:41.384306101 +0000 UTC m=+3192.317393745" observedRunningTime="2025-10-11 03:39:42.96862845 +0000 UTC m=+3193.901716094" watchObservedRunningTime="2025-10-11 03:39:42.997497006 +0000 UTC m=+3193.930584650" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.090253 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.110388 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.444294 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.541759 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjm44\" (UniqueName: \"kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44\") pod \"b3470028-8d0f-4e6a-8c32-684343626ece\" (UID: \"b3470028-8d0f-4e6a-8c32-684343626ece\") " Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.549715 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44" (OuterVolumeSpecName: "kube-api-access-cjm44") pod "b3470028-8d0f-4e6a-8c32-684343626ece" (UID: "b3470028-8d0f-4e6a-8c32-684343626ece"). InnerVolumeSpecName "kube-api-access-cjm44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.648350 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjm44\" (UniqueName: \"kubernetes.io/projected/b3470028-8d0f-4e6a-8c32-684343626ece-kube-api-access-cjm44\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.650535 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"] Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.816083 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6109815-b1b7-4a03-aaa5-3980017b06d2" path="/var/lib/kubelet/pods/a6109815-b1b7-4a03-aaa5-3980017b06d2/volumes" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.927055 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerStarted","Data":"5dde06d987d4fdcd610bc1c7f42f57a53b7801a16ee710f3a5023046f69e7e06"} Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.929266 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerStarted","Data":"cda24d744832a921bfa4c79646da0c3381dda3eada15278da0b49d960451bcd6"} Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.931819 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-dm7jz" event={"ID":"b3470028-8d0f-4e6a-8c32-684343626ece","Type":"ContainerDied","Data":"ff71fea00227913d345dc78069bdf05c1ae82db81bb694b13725680eddbdad55"} Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.931845 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-dm7jz" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.931857 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff71fea00227913d345dc78069bdf05c1ae82db81bb694b13725680eddbdad55" Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.934763 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-log" containerID="cri-o://061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" gracePeriod=30 Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.935181 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerStarted","Data":"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903"} Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.935840 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-httpd" containerID="cri-o://ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" gracePeriod=30 Oct 11 03:39:43 crc kubenswrapper[4953]: I1011 03:39:43.986502 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.98648582 podStartE2EDuration="3.98648582s" podCreationTimestamp="2025-10-11 03:39:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:39:43.98013025 +0000 UTC m=+3194.913217894" watchObservedRunningTime="2025-10-11 03:39:43.98648582 +0000 UTC m=+3194.919573464" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.047989 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5ffc79dd-kqmlf"] Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.544708 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.643001 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.674834 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.674997 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnt6z\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675026 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675079 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675131 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675196 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675244 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675264 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.675346 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle\") pod \"61888a33-62d6-4824-95d8-9891836524d0\" (UID: \"61888a33-62d6-4824-95d8-9891836524d0\") " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.677836 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.678053 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs" (OuterVolumeSpecName: "logs") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.681449 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph" (OuterVolumeSpecName: "ceph") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.681496 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z" (OuterVolumeSpecName: "kube-api-access-xnt6z") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "kube-api-access-xnt6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.683273 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.711157 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts" (OuterVolumeSpecName: "scripts") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.761293 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.770776 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.775263 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data" (OuterVolumeSpecName: "config-data") pod "61888a33-62d6-4824-95d8-9891836524d0" (UID: "61888a33-62d6-4824-95d8-9891836524d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777465 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnt6z\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-kube-api-access-xnt6z\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777551 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777566 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/61888a33-62d6-4824-95d8-9891836524d0-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777577 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777586 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777596 4953 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777643 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777655 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61888a33-62d6-4824-95d8-9891836524d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.777666 4953 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/61888a33-62d6-4824-95d8-9891836524d0-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.781367 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.818716 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.882439 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.943985 4953 generic.go:334] 
"Generic (PLEG): container finished" podID="61888a33-62d6-4824-95d8-9891836524d0" containerID="ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" exitCode=0 Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944015 4953 generic.go:334] "Generic (PLEG): container finished" podID="61888a33-62d6-4824-95d8-9891836524d0" containerID="061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" exitCode=143 Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944073 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944068 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerDied","Data":"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944227 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerDied","Data":"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944249 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"61888a33-62d6-4824-95d8-9891836524d0","Type":"ContainerDied","Data":"0ed7a6960d3514bf97b9dc2fd671f16e2c465c7594213a8e951296fc4cd80e52"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.944269 4953 scope.go:117] "RemoveContainer" containerID="ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.946505 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5ffc79dd-kqmlf" event={"ID":"d9aab956-be2a-4013-9071-a9cfb31c7da2","Type":"ContainerStarted","Data":"0c618dd3fc8d95c4fabaf2a223188f5a3c98f8e8d86210ae6085a3db54e68a91"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.961830 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerStarted","Data":"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.961875 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerStarted","Data":"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae"} Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.963050 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-log" containerID="cri-o://3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" gracePeriod=30 Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.963181 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-httpd" containerID="cri-o://6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" gracePeriod=30 Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.979703 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.980870 4953 scope.go:117] "RemoveContainer" containerID="061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" Oct 11 03:39:44 crc kubenswrapper[4953]: I1011 03:39:44.992817 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.007263 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:45 crc kubenswrapper[4953]: E1011 03:39:45.007738 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-log" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.007754 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-log" Oct 11 03:39:45 crc kubenswrapper[4953]: E1011 03:39:45.007805 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3470028-8d0f-4e6a-8c32-684343626ece" containerName="mariadb-database-create" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.007813 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3470028-8d0f-4e6a-8c32-684343626ece" containerName="mariadb-database-create" Oct 11 03:39:45 crc kubenswrapper[4953]: E1011 03:39:45.007827 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-httpd" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.007832 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-httpd" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.008131 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-httpd" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.008150 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="61888a33-62d6-4824-95d8-9891836524d0" containerName="glance-log" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.008167 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3470028-8d0f-4e6a-8c32-684343626ece" containerName="mariadb-database-create" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.008728 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.008708781 podStartE2EDuration="4.008708781s" podCreationTimestamp="2025-10-11 03:39:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:39:44.99715941 +0000 UTC m=+3195.930247054" watchObservedRunningTime="2025-10-11 03:39:45.008708781 +0000 UTC m=+3195.941796425" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.009709 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.012533 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.014367 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.031872 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.059774 4953 scope.go:117] "RemoveContainer" containerID="ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" Oct 11 03:39:45 crc kubenswrapper[4953]: E1011 03:39:45.060379 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903\": container with ID starting with ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903 not found: ID does not exist" containerID="ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.060409 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903"} err="failed to get container status \"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903\": rpc error: code = NotFound desc = could not find container \"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903\": container with ID starting with ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903 not found: ID does not exist" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.060429 4953 scope.go:117] "RemoveContainer" containerID="061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" Oct 11 03:39:45 crc kubenswrapper[4953]: E1011 03:39:45.060980 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652\": container with ID starting with 061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652 not found: ID does not exist" containerID="061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.061004 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652"} err="failed to get container status \"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652\": rpc error: code = NotFound desc = could not find container \"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652\": container with ID starting with 061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652 not found: ID does not exist" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.061017 4953 scope.go:117] "RemoveContainer" containerID="ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.061325 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903"} err="failed to get container status 
\"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903\": rpc error: code = NotFound desc = could not find container \"ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903\": container with ID starting with ba23a8778944d2dad4a68918b0fa90d7ecc2a0df347ccccb65cfc46206dc2903 not found: ID does not exist" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.061352 4953 scope.go:117] "RemoveContainer" containerID="061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.061623 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652"} err="failed to get container status \"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652\": rpc error: code = NotFound desc = could not find container \"061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652\": container with ID starting with 061516a2ce7f65dfb7ca9266f02cf3b6a4f1929934d5e20979a5ef329a3c0652 not found: ID does not exist" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086423 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-logs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086488 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086549 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086582 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086689 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086785 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfrtl\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-kube-api-access-mfrtl\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: 
I1011 03:39:45.086827 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086844 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.086924 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189265 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189527 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189655 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfrtl\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-kube-api-access-mfrtl\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189704 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189730 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189791 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189826 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-logs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189859 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.189904 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.195856 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.197962 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-logs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.198522 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0746b898-8459-41c7-b3e8-251b5b3a2412-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.199454 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.199969 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.203542 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.205760 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.213801 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0746b898-8459-41c7-b3e8-251b5b3a2412-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.224756 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfrtl\" (UniqueName: \"kubernetes.io/projected/0746b898-8459-41c7-b3e8-251b5b3a2412-kube-api-access-mfrtl\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.235044 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"0746b898-8459-41c7-b3e8-251b5b3a2412\") " pod="openstack/glance-default-internal-api-0" Oct 11 03:39:45 crc kubenswrapper[4953]: I1011 03:39:45.416806 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.717253 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802142 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh4pz\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802271 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802325 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802365 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802477 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802534 4953 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802638 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802658 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802697 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run\") pod \"b971001f-3d23-493a-9367-297640d591da\" (UID: \"b971001f-3d23-493a-9367-297640d591da\") " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.802774 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs" (OuterVolumeSpecName: "logs") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.803193 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.804164 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.814220 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts" (OuterVolumeSpecName: "scripts") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.819928 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph" (OuterVolumeSpecName: "ceph") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.822365 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61888a33-62d6-4824-95d8-9891836524d0" path="/var/lib/kubelet/pods/61888a33-62d6-4824-95d8-9891836524d0/volumes" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.828061 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.835544 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz" (OuterVolumeSpecName: "kube-api-access-bh4pz") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "kube-api-access-bh4pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.847737 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.868435 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.904763 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data" (OuterVolumeSpecName: "config-data") pod "b971001f-3d23-493a-9367-297640d591da" (UID: "b971001f-3d23-493a-9367-297640d591da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906376 4953 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906400 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906409 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906426 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906434 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906444 4953 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b971001f-3d23-493a-9367-297640d591da-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906452 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh4pz\" (UniqueName: \"kubernetes.io/projected/b971001f-3d23-493a-9367-297640d591da-kube-api-access-bh4pz\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.906462 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b971001f-3d23-493a-9367-297640d591da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.930791 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973387 4953 generic.go:334] "Generic (PLEG): container finished" podID="b971001f-3d23-493a-9367-297640d591da" containerID="6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" exitCode=143 Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973415 4953 generic.go:334] "Generic (PLEG): container finished" podID="b971001f-3d23-493a-9367-297640d591da" containerID="3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" exitCode=143 Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973435 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerDied","Data":"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7"} Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973461 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerDied","Data":"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae"} Oct 11 03:39:46 crc 
kubenswrapper[4953]: I1011 03:39:45.973459 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973483 4953 scope.go:117] "RemoveContainer" containerID="6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:45.973471 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b971001f-3d23-493a-9367-297640d591da","Type":"ContainerDied","Data":"cda24d744832a921bfa4c79646da0c3381dda3eada15278da0b49d960451bcd6"} Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.011351 4953 scope.go:117] "RemoveContainer" containerID="3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.013985 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.022788 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.036934 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.048404 4953 scope.go:117] "RemoveContainer" containerID="6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.049439 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:46 crc kubenswrapper[4953]: E1011 03:39:46.050063 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-log" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.050076 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-log" Oct 11 03:39:46 crc kubenswrapper[4953]: E1011 03:39:46.050098 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-httpd" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.050104 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-httpd" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.050368 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-log" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.050390 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b971001f-3d23-493a-9367-297640d591da" containerName="glance-httpd" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.051674 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: E1011 03:39:46.052825 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7\": container with ID starting with 6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7 not found: ID does not exist" containerID="6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.052847 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7"} err="failed to get container status \"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7\": rpc error: code = NotFound desc = could not find container \"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7\": container with ID starting with 6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7 not found: ID does not exist" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.052870 4953 scope.go:117] "RemoveContainer" containerID="3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.054238 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.054653 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 11 03:39:46 crc kubenswrapper[4953]: E1011 03:39:46.059364 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae\": container with ID starting with 3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae not found: ID does not exist" containerID="3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.059385 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae"} err="failed to get container status \"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae\": rpc error: code = NotFound desc = could not find container \"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae\": container with ID starting with 3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae not found: ID does not exist" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.059403 4953 scope.go:117] "RemoveContainer" containerID="6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.059660 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.059754 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7"} err="failed to get container status \"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7\": rpc error: code = NotFound desc = could not find container \"6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7\": container with ID 
starting with 6b3a41bd7675ec4646f525efbeeecf14db4fdfc265ff15fd841c8665a9ddfca7 not found: ID does not exist" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.059778 4953 scope.go:117] "RemoveContainer" containerID="3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.060327 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae"} err="failed to get container status \"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae\": rpc error: code = NotFound desc = could not find container \"3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae\": container with ID starting with 3162b2753b2fb03f27e21ce1a6446f02b6b7d5d0eb3a103d2b2c19bff1b52cae not found: ID does not exist" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.115046 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.115878 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-ceph\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.115941 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-logs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.115971 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt2kt\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-kube-api-access-wt2kt\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.115999 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.116046 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.116068 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.116196 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.116242 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-config-data\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.218904 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-logs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.218978 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt2kt\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-kube-api-access-wt2kt\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219010 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219072 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219093 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219153 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219179 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219247 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.219308 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-ceph\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.221599 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-logs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.221588 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.221771 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd817825-541e-42b4-81dc-af9b352ce6c0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.224074 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-ceph\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.224340 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.228490 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-config-data\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.235671 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc 
kubenswrapper[4953]: I1011 03:39:46.237690 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd817825-541e-42b4-81dc-af9b352ce6c0-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.241124 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt2kt\" (UniqueName: \"kubernetes.io/projected/cd817825-541e-42b4-81dc-af9b352ce6c0-kube-api-access-wt2kt\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.266797 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-external-api-0\" (UID: \"cd817825-541e-42b4-81dc-af9b352ce6c0\") " pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.374067 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.750197 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 11 03:39:46 crc kubenswrapper[4953]: I1011 03:39:46.986728 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0746b898-8459-41c7-b3e8-251b5b3a2412","Type":"ContainerStarted","Data":"162fa7c98851b84270d7e6cabb5e7261b0bc3d5218135161b53ddffaceb68a54"} Oct 11 03:39:47 crc kubenswrapper[4953]: I1011 03:39:47.139038 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 11 03:39:47 crc kubenswrapper[4953]: I1011 03:39:47.814385 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b971001f-3d23-493a-9367-297640d591da" path="/var/lib/kubelet/pods/b971001f-3d23-493a-9367-297640d591da/volumes" Oct 11 03:39:47 crc kubenswrapper[4953]: I1011 03:39:47.997145 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd817825-541e-42b4-81dc-af9b352ce6c0","Type":"ContainerStarted","Data":"cbf34a466c467c52259063ff9e09503c96fd6f05bc9b02b1b2bb83d9effb1256"} Oct 11 03:39:47 crc kubenswrapper[4953]: I1011 03:39:47.997186 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd817825-541e-42b4-81dc-af9b352ce6c0","Type":"ContainerStarted","Data":"7ea47444fdcea089d67d1504f0043fd1dbebdb0e239970a0204d7a4fbfa80525"} Oct 11 03:39:47 crc kubenswrapper[4953]: I1011 03:39:47.999767 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0746b898-8459-41c7-b3e8-251b5b3a2412","Type":"ContainerStarted","Data":"207488a47d3409d6d3358cff578a4d16a8c6b230c9911911205603276128a2f5"} Oct 11 03:39:49 crc kubenswrapper[4953]: I1011 03:39:49.836037 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.021596 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-94a2-account-create-cq9hf"] Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.023249 4953 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.025970 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.030375 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-94a2-account-create-cq9hf"] Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.047985 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.221495 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcntk\" (UniqueName: \"kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk\") pod \"manila-94a2-account-create-cq9hf\" (UID: \"19a5077f-644f-4bc8-aa46-e2407d412716\") " pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.324568 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcntk\" (UniqueName: \"kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk\") pod \"manila-94a2-account-create-cq9hf\" (UID: \"19a5077f-644f-4bc8-aa46-e2407d412716\") " pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.344790 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcntk\" (UniqueName: \"kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk\") pod \"manila-94a2-account-create-cq9hf\" (UID: \"19a5077f-644f-4bc8-aa46-e2407d412716\") " pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:50 crc kubenswrapper[4953]: I1011 03:39:50.352478 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:51 crc kubenswrapper[4953]: I1011 03:39:51.795815 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:39:51 crc kubenswrapper[4953]: E1011 03:39:51.796279 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:39:52 crc kubenswrapper[4953]: I1011 03:39:52.741074 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-94a2-account-create-cq9hf"] Oct 11 03:39:52 crc kubenswrapper[4953]: W1011 03:39:52.755577 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19a5077f_644f_4bc8_aa46_e2407d412716.slice/crio-42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7 WatchSource:0}: Error finding container 42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7: Status 404 returned error can't find the container with id 42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7 Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.066289 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerStarted","Data":"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.067271 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerStarted","Data":"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.066982 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6888d5bb97-swtpx" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon" containerID="cri-o://a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" gracePeriod=30 Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.066326 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6888d5bb97-swtpx" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon-log" containerID="cri-o://2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" gracePeriod=30 Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.093786 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0746b898-8459-41c7-b3e8-251b5b3a2412","Type":"ContainerStarted","Data":"7fb4ed6f8cdb4a6d010a5e70287757e5c473c860f62b82b2943d5a54bfdb59d1"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.103394 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6888d5bb97-swtpx" podStartSLOduration=2.024901502 podStartE2EDuration="13.103373255s" podCreationTimestamp="2025-10-11 03:39:40 +0000 UTC" firstStartedPulling="2025-10-11 03:39:41.349227008 +0000 UTC m=+3192.282314652" lastFinishedPulling="2025-10-11 03:39:52.427698761 +0000 UTC 
m=+3203.360786405" observedRunningTime="2025-10-11 03:39:53.089137717 +0000 UTC m=+3204.022225361" watchObservedRunningTime="2025-10-11 03:39:53.103373255 +0000 UTC m=+3204.036460909" Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.105244 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-94a2-account-create-cq9hf" event={"ID":"19a5077f-644f-4bc8-aa46-e2407d412716","Type":"ContainerStarted","Data":"42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.123971 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerStarted","Data":"fbc9669ed94dd2101222cedab508c1eb94a7db6c6ab66b9fcb455894458d7b2e"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.124016 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerStarted","Data":"5d6a9726c8534c7f2b0628df4a51959661a37a9da74af0ed4b0b1bfba30fd1b1"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.124126 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-67f7c8b487-cv6l8" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon-log" containerID="cri-o://5d6a9726c8534c7f2b0628df4a51959661a37a9da74af0ed4b0b1bfba30fd1b1" gracePeriod=30 Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.124362 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-67f7c8b487-cv6l8" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon" containerID="cri-o://fbc9669ed94dd2101222cedab508c1eb94a7db6c6ab66b9fcb455894458d7b2e" gracePeriod=30 Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.129584 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.129557204 podStartE2EDuration="9.129557204s" podCreationTimestamp="2025-10-11 03:39:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:39:53.114034763 +0000 UTC m=+3204.047122417" watchObservedRunningTime="2025-10-11 03:39:53.129557204 +0000 UTC m=+3204.062644848" Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.131842 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-94a2-account-create-cq9hf" podStartSLOduration=4.131822661 podStartE2EDuration="4.131822661s" podCreationTimestamp="2025-10-11 03:39:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:39:53.130642621 +0000 UTC m=+3204.063730265" watchObservedRunningTime="2025-10-11 03:39:53.131822661 +0000 UTC m=+3204.064910315" Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.135283 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5ffc79dd-kqmlf" event={"ID":"d9aab956-be2a-4013-9071-a9cfb31c7da2","Type":"ContainerStarted","Data":"11c5147325c6d1e76b960e4ee3c35d4e28a32836663b1a2b5bf41934d9e2bda8"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.140545 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" 
event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerStarted","Data":"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.140574 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerStarted","Data":"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"} Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.163735 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-67f7c8b487-cv6l8" podStartSLOduration=2.21166817 podStartE2EDuration="13.163714303s" podCreationTimestamp="2025-10-11 03:39:40 +0000 UTC" firstStartedPulling="2025-10-11 03:39:41.465868262 +0000 UTC m=+3192.398955906" lastFinishedPulling="2025-10-11 03:39:52.417914395 +0000 UTC m=+3203.351002039" observedRunningTime="2025-10-11 03:39:53.154889381 +0000 UTC m=+3204.087977045" watchObservedRunningTime="2025-10-11 03:39:53.163714303 +0000 UTC m=+3204.096801947" Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.178773 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-597b9665b8-kw4fw" podStartSLOduration=2.444273794 podStartE2EDuration="11.178755461s" podCreationTimestamp="2025-10-11 03:39:42 +0000 UTC" firstStartedPulling="2025-10-11 03:39:43.692764792 +0000 UTC m=+3194.625852436" lastFinishedPulling="2025-10-11 03:39:52.427246459 +0000 UTC m=+3203.360334103" observedRunningTime="2025-10-11 03:39:53.175316045 +0000 UTC m=+3204.108403709" watchObservedRunningTime="2025-10-11 03:39:53.178755461 +0000 UTC m=+3204.111843105" Oct 11 03:39:53 crc kubenswrapper[4953]: I1011 03:39:53.202149 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-d5ffc79dd-kqmlf" podStartSLOduration=2.8432512389999998 podStartE2EDuration="11.202124489s" podCreationTimestamp="2025-10-11 03:39:42 +0000 UTC" firstStartedPulling="2025-10-11 03:39:44.06997618 +0000 UTC m=+3195.003063824" lastFinishedPulling="2025-10-11 03:39:52.42884943 +0000 UTC m=+3203.361937074" observedRunningTime="2025-10-11 03:39:53.197664217 +0000 UTC m=+3204.130751871" watchObservedRunningTime="2025-10-11 03:39:53.202124489 +0000 UTC m=+3204.135212133" Oct 11 03:39:54 crc kubenswrapper[4953]: I1011 03:39:54.152689 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd817825-541e-42b4-81dc-af9b352ce6c0","Type":"ContainerStarted","Data":"9ace7903c9306068bd87471be4d3b3c7f39f86cd4dfa3fc5e716cacb6ad50279"} Oct 11 03:39:54 crc kubenswrapper[4953]: I1011 03:39:54.156062 4953 generic.go:334] "Generic (PLEG): container finished" podID="19a5077f-644f-4bc8-aa46-e2407d412716" containerID="1f27c6e9dc9f8f41e4d404a696583f15a3abe106643b1a91cf8a82e276ed0766" exitCode=0 Oct 11 03:39:54 crc kubenswrapper[4953]: I1011 03:39:54.156167 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-94a2-account-create-cq9hf" event={"ID":"19a5077f-644f-4bc8-aa46-e2407d412716","Type":"ContainerDied","Data":"1f27c6e9dc9f8f41e4d404a696583f15a3abe106643b1a91cf8a82e276ed0766"} Oct 11 03:39:54 crc kubenswrapper[4953]: I1011 03:39:54.158936 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5ffc79dd-kqmlf" event={"ID":"d9aab956-be2a-4013-9071-a9cfb31c7da2","Type":"ContainerStarted","Data":"ccefd900b73711f633ba860bef6f62a6d7f2f65fc36bab54f0b0982c23ff7291"} Oct 11 03:39:54 crc 
kubenswrapper[4953]: I1011 03:39:54.181571 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.181554553 podStartE2EDuration="8.181554553s" podCreationTimestamp="2025-10-11 03:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:39:54.174529046 +0000 UTC m=+3205.107616690" watchObservedRunningTime="2025-10-11 03:39:54.181554553 +0000 UTC m=+3205.114642197" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.417550 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.418190 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.455316 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.471728 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.580340 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.649892 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcntk\" (UniqueName: \"kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk\") pod \"19a5077f-644f-4bc8-aa46-e2407d412716\" (UID: \"19a5077f-644f-4bc8-aa46-e2407d412716\") " Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.656828 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk" (OuterVolumeSpecName: "kube-api-access-wcntk") pod "19a5077f-644f-4bc8-aa46-e2407d412716" (UID: "19a5077f-644f-4bc8-aa46-e2407d412716"). InnerVolumeSpecName "kube-api-access-wcntk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:39:55 crc kubenswrapper[4953]: I1011 03:39:55.753392 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcntk\" (UniqueName: \"kubernetes.io/projected/19a5077f-644f-4bc8-aa46-e2407d412716-kube-api-access-wcntk\") on node \"crc\" DevicePath \"\"" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.180945 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-94a2-account-create-cq9hf" event={"ID":"19a5077f-644f-4bc8-aa46-e2407d412716","Type":"ContainerDied","Data":"42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7"} Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.180993 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f67153ffceacdcfda998bebf963f3906d50d7812fa6aecf264b2f5c9e474b7" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.181013 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-94a2-account-create-cq9hf" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.181626 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.181649 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.375055 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.375102 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.402987 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 03:39:56 crc kubenswrapper[4953]: I1011 03:39:56.427720 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 11 03:39:57 crc kubenswrapper[4953]: I1011 03:39:57.187967 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 03:39:57 crc kubenswrapper[4953]: I1011 03:39:57.188024 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 11 03:39:58 crc kubenswrapper[4953]: I1011 03:39:58.352910 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 03:39:59 crc kubenswrapper[4953]: I1011 03:39:59.410196 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.173848 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.271107 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.383641 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-7z484"] Oct 11 03:40:00 crc kubenswrapper[4953]: E1011 03:40:00.384006 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19a5077f-644f-4bc8-aa46-e2407d412716" containerName="mariadb-account-create" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.384030 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="19a5077f-644f-4bc8-aa46-e2407d412716" containerName="mariadb-account-create" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.384233 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="19a5077f-644f-4bc8-aa46-e2407d412716" containerName="mariadb-account-create" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.385328 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.389119 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.389280 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-5vpnv" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.397539 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-7z484"] Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.464743 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.464900 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8xlc\" (UniqueName: \"kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.464951 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.465126 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.505183 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.567591 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8xlc\" (UniqueName: \"kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.567655 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.567710 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.567786 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.580406 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.580670 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.580903 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.585951 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8xlc\" (UniqueName: \"kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc\") pod \"manila-db-sync-7z484\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " pod="openstack/manila-db-sync-7z484" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.614710 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:40:00 crc kubenswrapper[4953]: I1011 03:40:00.714460 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-7z484" Oct 11 03:40:01 crc kubenswrapper[4953]: I1011 03:40:01.478783 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-7z484"] Oct 11 03:40:02 crc kubenswrapper[4953]: I1011 03:40:02.230931 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7z484" event={"ID":"b9c2b416-67ff-4474-ad70-013c9b0e17d5","Type":"ContainerStarted","Data":"ada4c026c7e8ac93dd0573d8ab8f82b641ad31ba1e3065f6f9fae7db3ff3f34c"} Oct 11 03:40:02 crc kubenswrapper[4953]: I1011 03:40:02.995281 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:40:02 crc kubenswrapper[4953]: I1011 03:40:02.995647 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:40:03 crc kubenswrapper[4953]: I1011 03:40:03.091401 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:40:03 crc kubenswrapper[4953]: I1011 03:40:03.091459 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:40:03 crc kubenswrapper[4953]: I1011 03:40:03.093029 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-d5ffc79dd-kqmlf" podUID="d9aab956-be2a-4013-9071-a9cfb31c7da2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.248:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.248:8443: connect: connection refused" Oct 11 03:40:03 crc kubenswrapper[4953]: I1011 03:40:03.798249 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:40:03 crc kubenswrapper[4953]: E1011 03:40:03.798770 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:40:06 crc kubenswrapper[4953]: I1011 03:40:06.267691 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7z484" event={"ID":"b9c2b416-67ff-4474-ad70-013c9b0e17d5","Type":"ContainerStarted","Data":"18ab27da3818c7432bf3f7e17e422c062b6e18d406bee81e30eb6b984fb1cc6f"} Oct 11 03:40:06 crc kubenswrapper[4953]: I1011 03:40:06.287457 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-7z484" podStartSLOduration=2.000065421 podStartE2EDuration="6.287438526s" podCreationTimestamp="2025-10-11 03:40:00 +0000 UTC" firstStartedPulling="2025-10-11 03:40:01.49508448 +0000 UTC m=+3212.428172114" lastFinishedPulling="2025-10-11 03:40:05.782457575 +0000 UTC m=+3216.715545219" observedRunningTime="2025-10-11 03:40:06.282678577 +0000 UTC m=+3217.215766231" watchObservedRunningTime="2025-10-11 03:40:06.287438526 +0000 UTC m=+3217.220526170" Oct 11 03:40:15 crc kubenswrapper[4953]: I1011 03:40:15.196960 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:40:15 crc kubenswrapper[4953]: I1011 03:40:15.211899 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:40:15 crc kubenswrapper[4953]: I1011 03:40:15.795953 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.358439 4953 generic.go:334] "Generic (PLEG): container finished" podID="b9c2b416-67ff-4474-ad70-013c9b0e17d5" containerID="18ab27da3818c7432bf3f7e17e422c062b6e18d406bee81e30eb6b984fb1cc6f" exitCode=0 Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.358674 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7z484" event={"ID":"b9c2b416-67ff-4474-ad70-013c9b0e17d5","Type":"ContainerDied","Data":"18ab27da3818c7432bf3f7e17e422c062b6e18d406bee81e30eb6b984fb1cc6f"} Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.361434 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952"} Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.835814 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.856363 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-d5ffc79dd-kqmlf" Oct 11 03:40:16 crc kubenswrapper[4953]: I1011 03:40:16.960798 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"] Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.369460 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-597b9665b8-kw4fw" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon-log" containerID="cri-o://f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f" gracePeriod=30 Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.369565 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-597b9665b8-kw4fw" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon" containerID="cri-o://9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6" gracePeriod=30 Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.828558 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-7z484" Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.952351 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data\") pod \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.952398 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle\") pod \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.953170 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8xlc\" (UniqueName: \"kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc\") pod \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.953249 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data\") pod \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\" (UID: \"b9c2b416-67ff-4474-ad70-013c9b0e17d5\") " Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.958133 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc" (OuterVolumeSpecName: "kube-api-access-x8xlc") pod "b9c2b416-67ff-4474-ad70-013c9b0e17d5" (UID: "b9c2b416-67ff-4474-ad70-013c9b0e17d5"). InnerVolumeSpecName "kube-api-access-x8xlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.960666 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data" (OuterVolumeSpecName: "config-data") pod "b9c2b416-67ff-4474-ad70-013c9b0e17d5" (UID: "b9c2b416-67ff-4474-ad70-013c9b0e17d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.963971 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "b9c2b416-67ff-4474-ad70-013c9b0e17d5" (UID: "b9c2b416-67ff-4474-ad70-013c9b0e17d5"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:17 crc kubenswrapper[4953]: I1011 03:40:17.986772 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9c2b416-67ff-4474-ad70-013c9b0e17d5" (UID: "b9c2b416-67ff-4474-ad70-013c9b0e17d5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.055163 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8xlc\" (UniqueName: \"kubernetes.io/projected/b9c2b416-67ff-4474-ad70-013c9b0e17d5-kube-api-access-x8xlc\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.055198 4953 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.055207 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.055217 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2b416-67ff-4474-ad70-013c9b0e17d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.379206 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7z484" event={"ID":"b9c2b416-67ff-4474-ad70-013c9b0e17d5","Type":"ContainerDied","Data":"ada4c026c7e8ac93dd0573d8ab8f82b641ad31ba1e3065f6f9fae7db3ff3f34c"} Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.379245 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-7z484" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.379249 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ada4c026c7e8ac93dd0573d8ab8f82b641ad31ba1e3065f6f9fae7db3ff3f34c" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.755430 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:18 crc kubenswrapper[4953]: E1011 03:40:18.755931 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c2b416-67ff-4474-ad70-013c9b0e17d5" containerName="manila-db-sync" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.755957 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c2b416-67ff-4474-ad70-013c9b0e17d5" containerName="manila-db-sync" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.756455 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9c2b416-67ff-4474-ad70-013c9b0e17d5" containerName="manila-db-sync" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.759372 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.761290 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-5vpnv" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.762132 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.762461 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.762739 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.775162 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.817039 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.818730 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.822381 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.822517 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76b5fdb995-x4cwh"] Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.824456 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873308 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873356 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw46b\" (UniqueName: \"kubernetes.io/projected/c829e7d9-35e6-4927-8a22-0eee6c0e4846-kube-api-access-xw46b\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873383 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873406 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-dns-svc\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873421 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2ndj\" (UniqueName: 
\"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873438 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873454 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873471 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873493 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-openstack-edpm-ipam\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873521 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873534 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873554 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-config\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873569 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873592 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-sb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.873645 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.874510 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-nb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.874545 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gspmx\" (UniqueName: \"kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.874652 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.874673 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.874770 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.881285 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76b5fdb995-x4cwh"] Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.905666 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978139 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978192 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw46b\" (UniqueName: 
\"kubernetes.io/projected/c829e7d9-35e6-4927-8a22-0eee6c0e4846-kube-api-access-xw46b\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978223 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978244 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-dns-svc\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978264 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2ndj\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978288 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978311 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978306 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978335 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978387 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-openstack-edpm-ipam\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978420 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " 
pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978435 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978478 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978494 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-config\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978512 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-sb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978570 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gspmx\" (UniqueName: \"kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978586 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-nb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978624 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978645 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978696 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.978783 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.979288 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-dns-svc\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.979918 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-config\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.980158 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-sb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.980279 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.982378 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-ovsdbserver-nb\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.984386 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c829e7d9-35e6-4927-8a22-0eee6c0e4846-openstack-edpm-ipam\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.986033 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.986297 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " 
pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.986997 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.987032 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.989491 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.991401 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.992339 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.995992 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.996232 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw46b\" (UniqueName: \"kubernetes.io/projected/c829e7d9-35e6-4927-8a22-0eee6c0e4846-kube-api-access-xw46b\") pod \"dnsmasq-dns-76b5fdb995-x4cwh\" (UID: \"c829e7d9-35e6-4927-8a22-0eee6c0e4846\") " pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:18 crc kubenswrapper[4953]: I1011 03:40:18.999664 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.001808 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2ndj\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj\") pod \"manila-share-share1-0\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.002269 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gspmx\" (UniqueName: 
\"kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx\") pod \"manila-scheduler-0\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.039722 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.043015 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.047014 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.053091 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.080925 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ddsb\" (UniqueName: \"kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081015 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081062 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081082 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081105 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081123 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.081155 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.103138 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.159002 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.177252 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.183805 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.183850 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.183876 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.183894 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.183929 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.184005 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ddsb\" (UniqueName: \"kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.184059 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.184272 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.187864 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " 
pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.187941 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.194250 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.195166 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.205427 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.210760 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ddsb\" (UniqueName: \"kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb\") pod \"manila-api-0\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.420560 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.632485 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.740598 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76b5fdb995-x4cwh"] Oct 11 03:40:19 crc kubenswrapper[4953]: I1011 03:40:19.874916 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.025178 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.407774 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerStarted","Data":"8d542336259190c90e2118cb0bb05715aaed2d6345eff136a7cb474595a7135d"} Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.409437 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerStarted","Data":"278e6888e2cba7b9f01d73181e3eb548a680e36ef6776ecef158f0b845181919"} Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.416675 4953 generic.go:334] "Generic (PLEG): container finished" podID="c829e7d9-35e6-4927-8a22-0eee6c0e4846" containerID="59bdc6252d65accba0958f8089fd04bb4be2690521478204326c1340f85f0bb6" exitCode=0 Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.416769 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" event={"ID":"c829e7d9-35e6-4927-8a22-0eee6c0e4846","Type":"ContainerDied","Data":"59bdc6252d65accba0958f8089fd04bb4be2690521478204326c1340f85f0bb6"} Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.416799 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" event={"ID":"c829e7d9-35e6-4927-8a22-0eee6c0e4846","Type":"ContainerStarted","Data":"22f18db2d521203bfe47fe735db95135255489956d449cbf652c253e395428c6"} Oct 11 03:40:20 crc kubenswrapper[4953]: I1011 03:40:20.419803 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerStarted","Data":"1b82b0e2c617acbb045bc692aeac9e6d7e50119abdc2ca664d99ddf13e24352f"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.442267 4953 generic.go:334] "Generic (PLEG): container finished" podID="de690dc0-a12a-4321-b838-27a54e039cb1" containerID="9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6" exitCode=0 Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.442350 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerDied","Data":"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.464865 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerStarted","Data":"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.464920 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerStarted","Data":"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.464970 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.473593 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerStarted","Data":"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.473645 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerStarted","Data":"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.481908 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" event={"ID":"c829e7d9-35e6-4927-8a22-0eee6c0e4846","Type":"ContainerStarted","Data":"a3de4672238337b22215d66cd036ac9f2f565722e2b69eed77d6a8d5c6a994b6"} Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.482806 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.489159 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.489139835 podStartE2EDuration="2.489139835s" podCreationTimestamp="2025-10-11 03:40:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:40:21.480048286 +0000 UTC m=+3232.413135930" watchObservedRunningTime="2025-10-11 03:40:21.489139835 +0000 UTC m=+3232.422227479" Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.508448 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.861752725 podStartE2EDuration="3.50842238s" podCreationTimestamp="2025-10-11 03:40:18 +0000 UTC" firstStartedPulling="2025-10-11 03:40:19.629884341 +0000 UTC m=+3230.562971985" lastFinishedPulling="2025-10-11 03:40:20.276553996 +0000 UTC m=+3231.209641640" observedRunningTime="2025-10-11 03:40:21.498158862 +0000 UTC m=+3232.431246506" watchObservedRunningTime="2025-10-11 03:40:21.50842238 +0000 UTC m=+3232.441510024" Oct 11 03:40:21 crc kubenswrapper[4953]: I1011 03:40:21.518825 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" podStartSLOduration=3.518799661 podStartE2EDuration="3.518799661s" podCreationTimestamp="2025-10-11 03:40:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:40:21.51798308 +0000 UTC m=+3232.451070744" watchObservedRunningTime="2025-10-11 03:40:21.518799661 +0000 UTC m=+3232.451887305" Oct 11 03:40:22 crc kubenswrapper[4953]: I1011 03:40:22.324446 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:22 crc kubenswrapper[4953]: I1011 03:40:22.996187 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-597b9665b8-kw4fw" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon" probeResult="failure" 
output="Get \"https://10.217.0.247:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.247:8443: connect: connection refused" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.546047 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562708 4953 generic.go:334] "Generic (PLEG): container finished" podID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerID="a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" exitCode=137 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562740 4953 generic.go:334] "Generic (PLEG): container finished" podID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerID="2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" exitCode=137 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562772 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6888d5bb97-swtpx" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562818 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerDied","Data":"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629"} Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562845 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerDied","Data":"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f"} Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562855 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6888d5bb97-swtpx" event={"ID":"9d4bc2f5-7862-4a70-9acd-05003ce96228","Type":"ContainerDied","Data":"2f841f84737c3c78526be607b472b725e7258e68291ac6b133b0a6a963357382"} Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.562870 4953 scope.go:117] "RemoveContainer" containerID="a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578584 4953 generic.go:334] "Generic (PLEG): container finished" podID="29f04c85-f39d-499e-8f91-df5c94306234" containerID="fbc9669ed94dd2101222cedab508c1eb94a7db6c6ab66b9fcb455894458d7b2e" exitCode=137 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578634 4953 generic.go:334] "Generic (PLEG): container finished" podID="29f04c85-f39d-499e-8f91-df5c94306234" containerID="5d6a9726c8534c7f2b0628df4a51959661a37a9da74af0ed4b0b1bfba30fd1b1" exitCode=137 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578798 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api-log" containerID="cri-o://25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" gracePeriod=30 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578896 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api" containerID="cri-o://329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" gracePeriod=30 Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578631 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" 
event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerDied","Data":"fbc9669ed94dd2101222cedab508c1eb94a7db6c6ab66b9fcb455894458d7b2e"} Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.578968 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerDied","Data":"5d6a9726c8534c7f2b0628df4a51959661a37a9da74af0ed4b0b1bfba30fd1b1"} Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.697385 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2fp8\" (UniqueName: \"kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8\") pod \"9d4bc2f5-7862-4a70-9acd-05003ce96228\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.697456 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs\") pod \"9d4bc2f5-7862-4a70-9acd-05003ce96228\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.697553 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts\") pod \"9d4bc2f5-7862-4a70-9acd-05003ce96228\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.697579 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data\") pod \"9d4bc2f5-7862-4a70-9acd-05003ce96228\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.697641 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key\") pod \"9d4bc2f5-7862-4a70-9acd-05003ce96228\" (UID: \"9d4bc2f5-7862-4a70-9acd-05003ce96228\") " Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.698922 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs" (OuterVolumeSpecName: "logs") pod "9d4bc2f5-7862-4a70-9acd-05003ce96228" (UID: "9d4bc2f5-7862-4a70-9acd-05003ce96228"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.707093 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9d4bc2f5-7862-4a70-9acd-05003ce96228" (UID: "9d4bc2f5-7862-4a70-9acd-05003ce96228"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.709830 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8" (OuterVolumeSpecName: "kube-api-access-f2fp8") pod "9d4bc2f5-7862-4a70-9acd-05003ce96228" (UID: "9d4bc2f5-7862-4a70-9acd-05003ce96228"). InnerVolumeSpecName "kube-api-access-f2fp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.746187 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts" (OuterVolumeSpecName: "scripts") pod "9d4bc2f5-7862-4a70-9acd-05003ce96228" (UID: "9d4bc2f5-7862-4a70-9acd-05003ce96228"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.747030 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data" (OuterVolumeSpecName: "config-data") pod "9d4bc2f5-7862-4a70-9acd-05003ce96228" (UID: "9d4bc2f5-7862-4a70-9acd-05003ce96228"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.779277 4953 scope.go:117] "RemoveContainer" containerID="2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.800161 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.800334 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9d4bc2f5-7862-4a70-9acd-05003ce96228-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.800390 4953 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9d4bc2f5-7862-4a70-9acd-05003ce96228-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.800441 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2fp8\" (UniqueName: \"kubernetes.io/projected/9d4bc2f5-7862-4a70-9acd-05003ce96228-kube-api-access-f2fp8\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.800528 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d4bc2f5-7862-4a70-9acd-05003ce96228-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.836664 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.845792 4953 scope.go:117] "RemoveContainer" containerID="a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" Oct 11 03:40:23 crc kubenswrapper[4953]: E1011 03:40:23.846377 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629\": container with ID starting with a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629 not found: ID does not exist" containerID="a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.846420 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629"} err="failed to get container status \"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629\": rpc error: code = NotFound desc = could not find container \"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629\": container with ID starting with a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629 not found: ID does not exist" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.846445 4953 scope.go:117] "RemoveContainer" containerID="2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" Oct 11 03:40:23 crc kubenswrapper[4953]: E1011 03:40:23.847215 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f\": container with ID starting with 2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f not found: ID does not exist" containerID="2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.847242 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f"} err="failed to get container status \"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f\": rpc error: code = NotFound desc = could not find container \"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f\": container with ID starting with 2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f not found: ID does not exist" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.847257 4953 scope.go:117] "RemoveContainer" containerID="a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.847586 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629"} err="failed to get container status \"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629\": rpc error: code = NotFound desc = could not find container \"a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629\": container with ID starting with a215723fdea6101a2e179667dae93a1e616320349f80986684216ba719a13629 not found: ID does not exist" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.847623 4953 scope.go:117] "RemoveContainer" containerID="2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.848104 
4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f"} err="failed to get container status \"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f\": rpc error: code = NotFound desc = could not find container \"2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f\": container with ID starting with 2eb1712ec333b89b60d7b4a23e9ed82205cbfb34b5d1095f73a8aa2dbd58525f not found: ID does not exist" Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.903896 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:40:23 crc kubenswrapper[4953]: I1011 03:40:23.917948 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6888d5bb97-swtpx"] Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.005279 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs\") pod \"29f04c85-f39d-499e-8f91-df5c94306234\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.005579 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjd2m\" (UniqueName: \"kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m\") pod \"29f04c85-f39d-499e-8f91-df5c94306234\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.005798 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data\") pod \"29f04c85-f39d-499e-8f91-df5c94306234\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.005831 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts\") pod \"29f04c85-f39d-499e-8f91-df5c94306234\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.005853 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key\") pod \"29f04c85-f39d-499e-8f91-df5c94306234\" (UID: \"29f04c85-f39d-499e-8f91-df5c94306234\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.006224 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs" (OuterVolumeSpecName: "logs") pod "29f04c85-f39d-499e-8f91-df5c94306234" (UID: "29f04c85-f39d-499e-8f91-df5c94306234"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.006419 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29f04c85-f39d-499e-8f91-df5c94306234-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.013739 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "29f04c85-f39d-499e-8f91-df5c94306234" (UID: "29f04c85-f39d-499e-8f91-df5c94306234"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.016790 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m" (OuterVolumeSpecName: "kube-api-access-tjd2m") pod "29f04c85-f39d-499e-8f91-df5c94306234" (UID: "29f04c85-f39d-499e-8f91-df5c94306234"). InnerVolumeSpecName "kube-api-access-tjd2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.040255 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data" (OuterVolumeSpecName: "config-data") pod "29f04c85-f39d-499e-8f91-df5c94306234" (UID: "29f04c85-f39d-499e-8f91-df5c94306234"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.042299 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts" (OuterVolumeSpecName: "scripts") pod "29f04c85-f39d-499e-8f91-df5c94306234" (UID: "29f04c85-f39d-499e-8f91-df5c94306234"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.109790 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.109837 4953 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/29f04c85-f39d-499e-8f91-df5c94306234-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.109854 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjd2m\" (UniqueName: \"kubernetes.io/projected/29f04c85-f39d-499e-8f91-df5c94306234-kube-api-access-tjd2m\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.109865 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f04c85-f39d-499e-8f91-df5c94306234-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.277437 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418124 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418229 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418353 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418421 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418546 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ddsb\" (UniqueName: \"kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418702 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.418731 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom\") pod \"f73097cc-de76-4103-9f7a-708464ebbbc1\" (UID: \"f73097cc-de76-4103-9f7a-708464ebbbc1\") " Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.419332 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.420161 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs" (OuterVolumeSpecName: "logs") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.424546 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts" (OuterVolumeSpecName: "scripts") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.425185 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.450832 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb" (OuterVolumeSpecName: "kube-api-access-4ddsb") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "kube-api-access-4ddsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.482535 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521101 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f73097cc-de76-4103-9f7a-708464ebbbc1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521356 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f73097cc-de76-4103-9f7a-708464ebbbc1-logs\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521457 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ddsb\" (UniqueName: \"kubernetes.io/projected/f73097cc-de76-4103-9f7a-708464ebbbc1-kube-api-access-4ddsb\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521566 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521654 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.521722 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.547717 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data" (OuterVolumeSpecName: "config-data") pod "f73097cc-de76-4103-9f7a-708464ebbbc1" (UID: "f73097cc-de76-4103-9f7a-708464ebbbc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594231 4953 generic.go:334] "Generic (PLEG): container finished" podID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerID="329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" exitCode=0 Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594410 4953 generic.go:334] "Generic (PLEG): container finished" podID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerID="25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" exitCode=143 Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594517 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerDied","Data":"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3"} Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594589 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerDied","Data":"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a"} Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594671 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"f73097cc-de76-4103-9f7a-708464ebbbc1","Type":"ContainerDied","Data":"8d542336259190c90e2118cb0bb05715aaed2d6345eff136a7cb474595a7135d"} Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594740 4953 scope.go:117] "RemoveContainer" containerID="329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.594812 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.600784 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f7c8b487-cv6l8" event={"ID":"29f04c85-f39d-499e-8f91-df5c94306234","Type":"ContainerDied","Data":"4d746e3c039ed655040c0502fb2f0cdc15934bc0a1942bf417c2d2ed05fe052f"} Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.600940 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67f7c8b487-cv6l8" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.623720 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f73097cc-de76-4103-9f7a-708464ebbbc1-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.629787 4953 scope.go:117] "RemoveContainer" containerID="25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.652495 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.664786 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-67f7c8b487-cv6l8"] Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.687899 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.697591 4953 scope.go:117] "RemoveContainer" containerID="329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.699091 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.703659 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3\": container with ID starting with 329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3 not found: ID does not exist" containerID="329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.703705 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3"} err="failed to get container status \"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3\": rpc error: code = NotFound desc = could not find container \"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3\": container with ID starting with 329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3 not found: ID does not exist" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.703733 4953 scope.go:117] "RemoveContainer" containerID="25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.707659 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708348 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708366 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api-log" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708379 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708397 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708415 4953 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708424 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708436 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708442 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708471 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708476 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.708495 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708500 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708666 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708684 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708698 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" containerName="horizon-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708710 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api-log" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708721 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="29f04c85-f39d-499e-8f91-df5c94306234" containerName="horizon" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.708732 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" containerName="manila-api" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.709780 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: E1011 03:40:24.720409 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a\": container with ID starting with 25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a not found: ID does not exist" containerID="25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.720926 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.721795 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a"} err="failed to get container status \"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a\": rpc error: code = NotFound desc = could not find container \"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a\": container with ID starting with 25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a not found: ID does not exist" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.723288 4953 scope.go:117] "RemoveContainer" containerID="329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.720948 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.721769 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725046 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725079 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data-custom\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725121 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnbwx\" (UniqueName: \"kubernetes.io/projected/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-kube-api-access-pnbwx\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725136 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725195 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-scripts\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725222 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-internal-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725285 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-public-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725304 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-etc-machine-id\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.725339 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-logs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.726044 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.730048 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3"} err="failed to get container status \"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3\": rpc error: code = NotFound desc = could not find container \"329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3\": container with ID starting with 329d4c3bb53b0dd5561a513841c413030cd0b02a9b2839b5ad576085b00533e3 not found: ID does not exist" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.730231 4953 scope.go:117] "RemoveContainer" containerID="25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.732259 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a"} err="failed to get container status \"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a\": rpc error: code = NotFound desc = could not find container \"25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a\": container with ID starting with 25966f62d8cb06588ccd2466fd1077fa6736e6852098a1700be8dd403042285a not found: ID does not exist" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.732304 4953 scope.go:117] "RemoveContainer" containerID="fbc9669ed94dd2101222cedab508c1eb94a7db6c6ab66b9fcb455894458d7b2e" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.826776 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnbwx\" (UniqueName: 
\"kubernetes.io/projected/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-kube-api-access-pnbwx\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.826827 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.827571 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-scripts\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.827655 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-internal-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.827826 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-public-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.827855 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-etc-machine-id\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.827918 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-logs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.828014 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.828039 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data-custom\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.829984 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-etc-machine-id\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.830322 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-logs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.832584 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-scripts\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.834926 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data-custom\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.835636 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-config-data\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.835988 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-internal-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.837442 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.840196 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-public-tls-certs\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.843954 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnbwx\" (UniqueName: \"kubernetes.io/projected/7eabdab5-7b5e-46f1-a558-6b7f4f25ae75-kube-api-access-pnbwx\") pod \"manila-api-0\" (UID: \"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75\") " pod="openstack/manila-api-0" Oct 11 03:40:24 crc kubenswrapper[4953]: I1011 03:40:24.953782 4953 scope.go:117] "RemoveContainer" containerID="5d6a9726c8534c7f2b0628df4a51959661a37a9da74af0ed4b0b1bfba30fd1b1" Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.047238 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.118446 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.118792 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-central-agent" containerID="cri-o://ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179" gracePeriod=30 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.118879 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="sg-core" containerID="cri-o://98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830" gracePeriod=30 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.118851 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="proxy-httpd" containerID="cri-o://80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0" gracePeriod=30 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.118916 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-notification-agent" containerID="cri-o://5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a" gracePeriod=30 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626175 4953 generic.go:334] "Generic (PLEG): container finished" podID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerID="80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0" exitCode=0 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626462 4953 generic.go:334] "Generic (PLEG): container finished" podID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerID="98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830" exitCode=2 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626473 4953 generic.go:334] "Generic (PLEG): container finished" podID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerID="ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179" exitCode=0 Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626213 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerDied","Data":"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0"} Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626500 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerDied","Data":"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830"} Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.626533 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerDied","Data":"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179"} Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.808310 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29f04c85-f39d-499e-8f91-df5c94306234" path="/var/lib/kubelet/pods/29f04c85-f39d-499e-8f91-df5c94306234/volumes" Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 
03:40:25.809310 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4bc2f5-7862-4a70-9acd-05003ce96228" path="/var/lib/kubelet/pods/9d4bc2f5-7862-4a70-9acd-05003ce96228/volumes" Oct 11 03:40:25 crc kubenswrapper[4953]: I1011 03:40:25.810198 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f73097cc-de76-4103-9f7a-708464ebbbc1" path="/var/lib/kubelet/pods/f73097cc-de76-4103-9f7a-708464ebbbc1/volumes" Oct 11 03:40:27 crc kubenswrapper[4953]: I1011 03:40:27.760723 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 11 03:40:27 crc kubenswrapper[4953]: W1011 03:40:27.767792 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7eabdab5_7b5e_46f1_a558_6b7f4f25ae75.slice/crio-d585eaeb485c3d82f9851d865427a836c68bc2713cc51981c3e7838a6e1cd03f WatchSource:0}: Error finding container d585eaeb485c3d82f9851d865427a836c68bc2713cc51981c3e7838a6e1cd03f: Status 404 returned error can't find the container with id d585eaeb485c3d82f9851d865427a836c68bc2713cc51981c3e7838a6e1cd03f Oct 11 03:40:28 crc kubenswrapper[4953]: I1011 03:40:28.658156 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75","Type":"ContainerStarted","Data":"94b64c97ef2bd1c8f88f7b52e27e691999e0c568718752baf6a865788d707184"} Oct 11 03:40:28 crc kubenswrapper[4953]: I1011 03:40:28.658562 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75","Type":"ContainerStarted","Data":"d585eaeb485c3d82f9851d865427a836c68bc2713cc51981c3e7838a6e1cd03f"} Oct 11 03:40:28 crc kubenswrapper[4953]: I1011 03:40:28.660292 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerStarted","Data":"ad9a524a9eaa79c0ba63687e6a0c3e6ad78ffac856569d471cbf218a83cd6761"} Oct 11 03:40:28 crc kubenswrapper[4953]: I1011 03:40:28.660325 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerStarted","Data":"3743c16c67fa2786e1f462201c375c0ac3bb706382f2e08a63e69461f6395487"} Oct 11 03:40:28 crc kubenswrapper[4953]: I1011 03:40:28.678207 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.2664396829999998 podStartE2EDuration="10.678187311s" podCreationTimestamp="2025-10-11 03:40:18 +0000 UTC" firstStartedPulling="2025-10-11 03:40:19.85289251 +0000 UTC m=+3230.785980154" lastFinishedPulling="2025-10-11 03:40:27.264640138 +0000 UTC m=+3238.197727782" observedRunningTime="2025-10-11 03:40:28.677817752 +0000 UTC m=+3239.610905416" watchObservedRunningTime="2025-10-11 03:40:28.678187311 +0000 UTC m=+3239.611274955" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.104008 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.160980 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.179834 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76b5fdb995-x4cwh" Oct 11 03:40:29 crc 
kubenswrapper[4953]: I1011 03:40:29.242754 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.242992 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerName="dnsmasq-dns" containerID="cri-o://406880cb6e89e8a4a1b6ed10c31cfeac875b27262587b84f9f5798f8966ca5e1" gracePeriod=10 Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.458914 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529624 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529673 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529710 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529821 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529857 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529923 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.529989 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pghg2\" (UniqueName: \"kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.530038 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data\") pod \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\" (UID: \"d6802295-eeec-4abf-85f7-6c909c6bcbc2\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 
03:40:29.530486 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.530595 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.535680 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2" (OuterVolumeSpecName: "kube-api-access-pghg2") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "kube-api-access-pghg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.536308 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts" (OuterVolumeSpecName: "scripts") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.575727 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.634065 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.634263 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.634336 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.634389 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6802295-eeec-4abf-85f7-6c909c6bcbc2-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.634455 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pghg2\" (UniqueName: \"kubernetes.io/projected/d6802295-eeec-4abf-85f7-6c909c6bcbc2-kube-api-access-pghg2\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.676445 4953 generic.go:334] "Generic (PLEG): container finished" podID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerID="406880cb6e89e8a4a1b6ed10c31cfeac875b27262587b84f9f5798f8966ca5e1" exitCode=0 Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.676535 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" event={"ID":"cad37403-5515-40a8-ab51-d2a40ffbff0f","Type":"ContainerDied","Data":"406880cb6e89e8a4a1b6ed10c31cfeac875b27262587b84f9f5798f8966ca5e1"} Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.689193 4953 generic.go:334] "Generic (PLEG): container finished" podID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerID="5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a" exitCode=0 Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.689280 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerDied","Data":"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a"} Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.689329 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6802295-eeec-4abf-85f7-6c909c6bcbc2","Type":"ContainerDied","Data":"5fbb9231dc7aefadc180c226cd0e8376f164900f474cb27437ff9de7b63f5883"} Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.689349 4953 scope.go:117] "RemoveContainer" containerID="80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.689553 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.701995 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"7eabdab5-7b5e-46f1-a558-6b7f4f25ae75","Type":"ContainerStarted","Data":"81e0b237e65fa196ae6e3a97c26a0e5ddad2edbd454f3522f248af0dfea0dde5"} Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.702045 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.712203 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.721329 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735221 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735275 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735313 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735397 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735423 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.735445 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8kpb\" (UniqueName: \"kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb\") pod \"cad37403-5515-40a8-ab51-d2a40ffbff0f\" (UID: \"cad37403-5515-40a8-ab51-d2a40ffbff0f\") " Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.736535 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 
03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.738381 4953 scope.go:117] "RemoveContainer" containerID="98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.739661 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.739651298 podStartE2EDuration="5.739651298s" podCreationTimestamp="2025-10-11 03:40:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:40:29.724353863 +0000 UTC m=+3240.657441507" watchObservedRunningTime="2025-10-11 03:40:29.739651298 +0000 UTC m=+3240.672738942" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.747004 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb" (OuterVolumeSpecName: "kube-api-access-s8kpb") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "kube-api-access-s8kpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.769096 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.787477 4953 scope.go:117] "RemoveContainer" containerID="5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.791758 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data" (OuterVolumeSpecName: "config-data") pod "d6802295-eeec-4abf-85f7-6c909c6bcbc2" (UID: "d6802295-eeec-4abf-85f7-6c909c6bcbc2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.811522 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config" (OuterVolumeSpecName: "config") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.816414 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.825401 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.828311 4953 scope.go:117] "RemoveContainer" containerID="ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.829541 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838152 4953 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838188 4953 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838197 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838208 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838218 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6802295-eeec-4abf-85f7-6c909c6bcbc2-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838227 4953 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-config\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.838235 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8kpb\" (UniqueName: \"kubernetes.io/projected/cad37403-5515-40a8-ab51-d2a40ffbff0f-kube-api-access-s8kpb\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.852722 4953 scope.go:117] "RemoveContainer" containerID="80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0" Oct 11 03:40:29 crc kubenswrapper[4953]: E1011 03:40:29.853229 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0\": container with ID starting with 80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0 not found: ID does not exist" containerID="80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.853286 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0"} err="failed to get container status \"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0\": rpc error: 
code = NotFound desc = could not find container \"80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0\": container with ID starting with 80d6e8b73c15fe3ca3a959f92d45ecf4e23b701994ca6318051a9ddc89397ad0 not found: ID does not exist" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.853320 4953 scope.go:117] "RemoveContainer" containerID="98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830" Oct 11 03:40:29 crc kubenswrapper[4953]: E1011 03:40:29.853974 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830\": container with ID starting with 98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830 not found: ID does not exist" containerID="98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.854007 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830"} err="failed to get container status \"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830\": rpc error: code = NotFound desc = could not find container \"98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830\": container with ID starting with 98cf98f79d9fdc4e89d807c1f055ff4df624b594fd75368d12d999582c9ca830 not found: ID does not exist" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.854028 4953 scope.go:117] "RemoveContainer" containerID="5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a" Oct 11 03:40:29 crc kubenswrapper[4953]: E1011 03:40:29.854379 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a\": container with ID starting with 5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a not found: ID does not exist" containerID="5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.854420 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a"} err="failed to get container status \"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a\": rpc error: code = NotFound desc = could not find container \"5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a\": container with ID starting with 5e0216d5ac305964f0f717292b22b28473dc78b31d067c8d91b3144e016f4f6a not found: ID does not exist" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.854449 4953 scope.go:117] "RemoveContainer" containerID="ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179" Oct 11 03:40:29 crc kubenswrapper[4953]: E1011 03:40:29.854780 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179\": container with ID starting with ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179 not found: ID does not exist" containerID="ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.854807 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179"} err="failed to get container status \"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179\": rpc error: code = NotFound desc = could not find container \"ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179\": container with ID starting with ac7e607883544af650f37867384de9dbb153cceadcdea3b119e41e2d9e57f179 not found: ID does not exist" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.856324 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cad37403-5515-40a8-ab51-d2a40ffbff0f" (UID: "cad37403-5515-40a8-ab51-d2a40ffbff0f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:40:29 crc kubenswrapper[4953]: I1011 03:40:29.939644 4953 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cad37403-5515-40a8-ab51-d2a40ffbff0f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.014286 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.024791 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.050300 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.050954 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="proxy-httpd" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.050979 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="proxy-httpd" Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.051002 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-notification-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051012 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-notification-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.051061 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerName="dnsmasq-dns" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051071 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerName="dnsmasq-dns" Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.051091 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="sg-core" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051099 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="sg-core" Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.051125 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerName="init" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051133 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" 
containerName="init" Oct 11 03:40:30 crc kubenswrapper[4953]: E1011 03:40:30.051144 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-central-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051152 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-central-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051776 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="sg-core" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051802 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-notification-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.051817 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="ceilometer-central-agent" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.052951 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" containerName="dnsmasq-dns" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.052985 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" containerName="proxy-httpd" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.055136 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.057935 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.058222 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.058390 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.063070 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145305 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145385 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145419 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145440 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145466 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7szl\" (UniqueName: \"kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145553 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145575 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.145597 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247283 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247358 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247395 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247430 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247488 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247525 4953 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247554 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.247582 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7szl\" (UniqueName: \"kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.248868 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.249077 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.251596 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.251809 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.251846 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.254510 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.255880 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.268581 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j7szl\" (UniqueName: \"kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl\") pod \"ceilometer-0\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.381566 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.713568 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" event={"ID":"cad37403-5515-40a8-ab51-d2a40ffbff0f","Type":"ContainerDied","Data":"1b5e76b4b7e8e632df8b786ae32ce02347bb174be8afaf92a01dfb7ac32feba1"} Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.713659 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-864d5fc68c-n86p9" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.713904 4953 scope.go:117] "RemoveContainer" containerID="406880cb6e89e8a4a1b6ed10c31cfeac875b27262587b84f9f5798f8966ca5e1" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.742275 4953 scope.go:117] "RemoveContainer" containerID="a810d2768a3b27bf97fe3ddcedbba901c162e27e5e34a8e34744292bd2ed69cb" Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.749670 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.757233 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-864d5fc68c-n86p9"] Oct 11 03:40:30 crc kubenswrapper[4953]: I1011 03:40:30.851433 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:31 crc kubenswrapper[4953]: I1011 03:40:31.460293 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:31 crc kubenswrapper[4953]: I1011 03:40:31.728059 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerStarted","Data":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} Oct 11 03:40:31 crc kubenswrapper[4953]: I1011 03:40:31.728724 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerStarted","Data":"96e4c94ae903e95e83d104f7355d230106ef02b2bf3b8bdd394268c4dafe76b8"} Oct 11 03:40:31 crc kubenswrapper[4953]: I1011 03:40:31.816062 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cad37403-5515-40a8-ab51-d2a40ffbff0f" path="/var/lib/kubelet/pods/cad37403-5515-40a8-ab51-d2a40ffbff0f/volumes" Oct 11 03:40:31 crc kubenswrapper[4953]: I1011 03:40:31.817296 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6802295-eeec-4abf-85f7-6c909c6bcbc2" path="/var/lib/kubelet/pods/d6802295-eeec-4abf-85f7-6c909c6bcbc2/volumes" Oct 11 03:40:32 crc kubenswrapper[4953]: E1011 03:40:32.113029 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]" Oct 11 03:40:32 crc kubenswrapper[4953]: I1011 03:40:32.738541 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerStarted","Data":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} Oct 11 03:40:32 crc kubenswrapper[4953]: I1011 03:40:32.995705 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-597b9665b8-kw4fw" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.247:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.247:8443: connect: connection refused" Oct 11 03:40:33 crc kubenswrapper[4953]: I1011 03:40:33.748674 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerStarted","Data":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.767897 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerStarted","Data":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.768041 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-central-agent" containerID="cri-o://38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" gracePeriod=30 Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.768189 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="sg-core" containerID="cri-o://066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" gracePeriod=30 Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.768222 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="proxy-httpd" containerID="cri-o://4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" gracePeriod=30 Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.768238 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-notification-agent" containerID="cri-o://c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" gracePeriod=30 Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.768477 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:40:35 crc kubenswrapper[4953]: I1011 03:40:35.792643 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.47559166 podStartE2EDuration="5.792623921s" podCreationTimestamp="2025-10-11 03:40:30 +0000 UTC" firstStartedPulling="2025-10-11 03:40:30.854918029 +0000 UTC m=+3241.788005673" lastFinishedPulling="2025-10-11 03:40:35.17195029 +0000 UTC m=+3246.105037934" observedRunningTime="2025-10-11 03:40:35.789554254 +0000 UTC m=+3246.722641918" watchObservedRunningTime="2025-10-11 03:40:35.792623921 +0000 UTC m=+3246.725711565" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.633120 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.780755 4953 generic.go:334] "Generic (PLEG): container finished" podID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" exitCode=0 Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781113 4953 generic.go:334] "Generic (PLEG): container finished" podID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" exitCode=2 Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781126 4953 generic.go:334] "Generic (PLEG): container finished" podID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" exitCode=0 Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.780844 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781135 4953 generic.go:334] "Generic (PLEG): container finished" podID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" exitCode=0 Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.780879 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerDied","Data":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781383 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerDied","Data":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781410 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerDied","Data":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781427 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerDied","Data":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781440 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd","Type":"ContainerDied","Data":"96e4c94ae903e95e83d104f7355d230106ef02b2bf3b8bdd394268c4dafe76b8"} Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.781467 4953 scope.go:117] "RemoveContainer" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.786389 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.786538 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7szl\" (UniqueName: \"kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl\") pod 
\"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.786569 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.786597 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.786649 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.787412 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.787479 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.787567 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.787924 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.788139 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs\") pod \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\" (UID: \"3242bb0c-9e5d-4c30-b1ff-b4cd552083dd\") " Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.789325 4953 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.789356 4953 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.796415 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts" (OuterVolumeSpecName: "scripts") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.797880 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl" (OuterVolumeSpecName: "kube-api-access-j7szl") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "kube-api-access-j7szl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.819217 4953 scope.go:117] "RemoveContainer" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.824506 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.841080 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.891148 4953 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.891183 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7szl\" (UniqueName: \"kubernetes.io/projected/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-kube-api-access-j7szl\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.891195 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.891203 4953 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.918902 4953 scope.go:117] "RemoveContainer" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.928378 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data" (OuterVolumeSpecName: "config-data") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.935334 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" (UID: "3242bb0c-9e5d-4c30-b1ff-b4cd552083dd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.945979 4953 scope.go:117] "RemoveContainer" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.981217 4953 scope.go:117] "RemoveContainer" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: E1011 03:40:36.982820 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": container with ID starting with 4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1 not found: ID does not exist" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.982881 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} err="failed to get container status \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": rpc error: code = NotFound desc = could not find container \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": container with ID starting with 4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.982918 4953 scope.go:117] "RemoveContainer" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: E1011 03:40:36.983249 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": container with ID starting with 066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5 not found: ID does not exist" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.983291 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} err="failed to get container status \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": rpc error: code = NotFound desc = could not find container \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": container with ID starting with 066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.983321 4953 scope.go:117] "RemoveContainer" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: E1011 03:40:36.983593 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": container with ID starting with c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe not found: ID does not exist" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.983636 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} err="failed to get container status \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": rpc error: code = NotFound desc = could not find container \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": container with ID starting with c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.983652 4953 scope.go:117] "RemoveContainer" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: E1011 03:40:36.983926 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": container with ID starting with 38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3 not found: ID does not exist" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.983974 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} err="failed to get container status \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": rpc error: code = NotFound desc = could not find container \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": container with ID starting with 38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.984001 4953 scope.go:117] "RemoveContainer" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.984703 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} err="failed to get container status \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": rpc error: code = NotFound desc = could not find container \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": container with ID starting with 4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.984773 4953 scope.go:117] "RemoveContainer" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985137 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} err="failed to get container status \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": rpc error: code = NotFound desc = could not find container \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": container with ID starting with 066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985165 4953 scope.go:117] "RemoveContainer" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985427 4953 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} err="failed to get container status \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": rpc error: code = NotFound desc = could not find container \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": container with ID starting with c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985450 4953 scope.go:117] "RemoveContainer" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985741 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} err="failed to get container status \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": rpc error: code = NotFound desc = could not find container \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": container with ID starting with 38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.985773 4953 scope.go:117] "RemoveContainer" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986084 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} err="failed to get container status \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": rpc error: code = NotFound desc = could not find container \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": container with ID starting with 4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986117 4953 scope.go:117] "RemoveContainer" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986479 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} err="failed to get container status \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": rpc error: code = NotFound desc = could not find container \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": container with ID starting with 066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986512 4953 scope.go:117] "RemoveContainer" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986845 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} err="failed to get container status \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": rpc error: code = NotFound desc = could not find container \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": container with ID starting with c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe not found: ID does not exist" Oct 
11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.986883 4953 scope.go:117] "RemoveContainer" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987144 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} err="failed to get container status \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": rpc error: code = NotFound desc = could not find container \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": container with ID starting with 38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987175 4953 scope.go:117] "RemoveContainer" containerID="4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987425 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1"} err="failed to get container status \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": rpc error: code = NotFound desc = could not find container \"4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1\": container with ID starting with 4c9315895546593e3fba686bdb309913da0fc8eaaf8cc2b8e9830753b12490d1 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987450 4953 scope.go:117] "RemoveContainer" containerID="066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987751 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5"} err="failed to get container status \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": rpc error: code = NotFound desc = could not find container \"066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5\": container with ID starting with 066d23b9d8d8effb2b21ff8ae933c869dd6efd10997c3a1f28e6258fe3d047d5 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.987776 4953 scope.go:117] "RemoveContainer" containerID="c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.988007 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe"} err="failed to get container status \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": rpc error: code = NotFound desc = could not find container \"c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe\": container with ID starting with c2fa1a2c85d2a9a428425766d6d989abf326cd05bbb412469565e5737347cabe not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.988033 4953 scope.go:117] "RemoveContainer" containerID="38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.988397 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3"} err="failed to get container status 
\"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": rpc error: code = NotFound desc = could not find container \"38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3\": container with ID starting with 38254416e3a492b0d3455d0be18e9d941b1a2cbbc9c628e1c7fedbc99dd841c3 not found: ID does not exist" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.993039 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:36 crc kubenswrapper[4953]: I1011 03:40:36.993073 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.115665 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.121729 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.136283 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:37 crc kubenswrapper[4953]: E1011 03:40:37.136810 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="proxy-httpd" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.136833 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="proxy-httpd" Oct 11 03:40:37 crc kubenswrapper[4953]: E1011 03:40:37.136873 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="sg-core" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.136881 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="sg-core" Oct 11 03:40:37 crc kubenswrapper[4953]: E1011 03:40:37.136899 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-notification-agent" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.136905 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-notification-agent" Oct 11 03:40:37 crc kubenswrapper[4953]: E1011 03:40:37.136919 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-central-agent" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.136941 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-central-agent" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.137146 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="proxy-httpd" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.137184 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-notification-agent" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.137197 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="ceilometer-central-agent" Oct 11 03:40:37 
crc kubenswrapper[4953]: I1011 03:40:37.137208 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" containerName="sg-core" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.142086 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.144400 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.144467 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.152374 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.153051 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299712 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-log-httpd\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299793 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffnhw\" (UniqueName: \"kubernetes.io/projected/d1608d80-9530-49ad-9bd2-43d2865e6eeb-kube-api-access-ffnhw\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299812 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-scripts\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299905 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-config-data\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299921 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-run-httpd\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299940 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.299978 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.300144 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.401929 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402037 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402106 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-log-httpd\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402180 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffnhw\" (UniqueName: \"kubernetes.io/projected/d1608d80-9530-49ad-9bd2-43d2865e6eeb-kube-api-access-ffnhw\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402201 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-scripts\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402264 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-config-data\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402296 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-run-httpd\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.402323 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.403200 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-log-httpd\") pod 
\"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.403427 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d1608d80-9530-49ad-9bd2-43d2865e6eeb-run-httpd\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.406463 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.407265 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.407551 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.408771 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-scripts\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.410848 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1608d80-9530-49ad-9bd2-43d2865e6eeb-config-data\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.421516 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffnhw\" (UniqueName: \"kubernetes.io/projected/d1608d80-9530-49ad-9bd2-43d2865e6eeb-kube-api-access-ffnhw\") pod \"ceilometer-0\" (UID: \"d1608d80-9530-49ad-9bd2-43d2865e6eeb\") " pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.458277 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.811644 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3242bb0c-9e5d-4c30-b1ff-b4cd552083dd" path="/var/lib/kubelet/pods/3242bb0c-9e5d-4c30-b1ff-b4cd552083dd/volumes" Oct 11 03:40:37 crc kubenswrapper[4953]: I1011 03:40:37.915347 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 11 03:40:37 crc kubenswrapper[4953]: W1011 03:40:37.929168 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1608d80_9530_49ad_9bd2_43d2865e6eeb.slice/crio-2da584a952dbdc2072e36397224cf158c8e908ec978dfc2c9245cae602b100fd WatchSource:0}: Error finding container 2da584a952dbdc2072e36397224cf158c8e908ec978dfc2c9245cae602b100fd: Status 404 returned error can't find the container with id 2da584a952dbdc2072e36397224cf158c8e908ec978dfc2c9245cae602b100fd Oct 11 03:40:38 crc kubenswrapper[4953]: I1011 03:40:38.607874 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:40:38 crc kubenswrapper[4953]: I1011 03:40:38.801289 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1608d80-9530-49ad-9bd2-43d2865e6eeb","Type":"ContainerStarted","Data":"177a1aa76b43a6457670a8e6abf7eccff5e85b09cfab89b2c6bc89bc88db6237"} Oct 11 03:40:38 crc kubenswrapper[4953]: I1011 03:40:38.801933 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1608d80-9530-49ad-9bd2-43d2865e6eeb","Type":"ContainerStarted","Data":"2da584a952dbdc2072e36397224cf158c8e908ec978dfc2c9245cae602b100fd"} Oct 11 03:40:39 crc kubenswrapper[4953]: I1011 03:40:39.818860 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1608d80-9530-49ad-9bd2-43d2865e6eeb","Type":"ContainerStarted","Data":"1fd9739e5bf28ae167f70c6d93fa615a251f5465a1e7878747ee1b9ff924871f"} Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.785438 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.833278 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.837390 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1608d80-9530-49ad-9bd2-43d2865e6eeb","Type":"ContainerStarted","Data":"957288ae052b6e2e9eac90259eac723b623c7a3aa10d7344c44f17c9a2ea49b6"} Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.839547 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.839858 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="manila-scheduler" containerID="cri-o://e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57" gracePeriod=30 Oct 11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.839884 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="probe" containerID="cri-o://480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce" gracePeriod=30 Oct 
11 03:40:40 crc kubenswrapper[4953]: I1011 03:40:40.898447 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:41 crc kubenswrapper[4953]: I1011 03:40:41.846455 4953 generic.go:334] "Generic (PLEG): container finished" podID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerID="480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce" exitCode=0 Oct 11 03:40:41 crc kubenswrapper[4953]: I1011 03:40:41.846542 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerDied","Data":"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce"} Oct 11 03:40:41 crc kubenswrapper[4953]: I1011 03:40:41.846951 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="manila-share" containerID="cri-o://3743c16c67fa2786e1f462201c375c0ac3bb706382f2e08a63e69461f6395487" gracePeriod=30 Oct 11 03:40:41 crc kubenswrapper[4953]: I1011 03:40:41.847009 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="probe" containerID="cri-o://ad9a524a9eaa79c0ba63687e6a0c3e6ad78ffac856569d471cbf218a83cd6761" gracePeriod=30 Oct 11 03:40:42 crc kubenswrapper[4953]: E1011 03:40:42.415909 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]" Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.866704 4953 generic.go:334] "Generic (PLEG): container finished" podID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerID="ad9a524a9eaa79c0ba63687e6a0c3e6ad78ffac856569d471cbf218a83cd6761" exitCode=0 Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.866955 4953 generic.go:334] "Generic (PLEG): container finished" podID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerID="3743c16c67fa2786e1f462201c375c0ac3bb706382f2e08a63e69461f6395487" exitCode=1 Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.866975 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerDied","Data":"ad9a524a9eaa79c0ba63687e6a0c3e6ad78ffac856569d471cbf218a83cd6761"} Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.867001 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerDied","Data":"3743c16c67fa2786e1f462201c375c0ac3bb706382f2e08a63e69461f6395487"} Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.995749 4953 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-597b9665b8-kw4fw" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.247:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.247:8443: connect: connection refused" Oct 11 03:40:42 crc kubenswrapper[4953]: I1011 03:40:42.995991 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-597b9665b8-kw4fw" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.071325 4953 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.136828 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2ndj\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.136933 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.136969 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137018 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137067 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137101 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137152 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137254 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data\") pod \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\" (UID: \"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.137782 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.142410 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts" (OuterVolumeSpecName: "scripts") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.145521 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.147740 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj" (OuterVolumeSpecName: "kube-api-access-s2ndj") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "kube-api-access-s2ndj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.154399 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph" (OuterVolumeSpecName: "ceph") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.159395 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.235287 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243303 4953 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-ceph\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243343 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243358 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243370 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2ndj\" (UniqueName: \"kubernetes.io/projected/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-kube-api-access-s2ndj\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243383 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243395 4953 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-var-lib-manila\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.243406 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.261199 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data" (OuterVolumeSpecName: "config-data") pod "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" (UID: "f3ac8eb4-35e7-4e53-bdde-d7590ae4d001"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.345173 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.878189 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.878332 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"f3ac8eb4-35e7-4e53-bdde-d7590ae4d001","Type":"ContainerDied","Data":"1b82b0e2c617acbb045bc692aeac9e6d7e50119abdc2ca664d99ddf13e24352f"} Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.878629 4953 scope.go:117] "RemoveContainer" containerID="ad9a524a9eaa79c0ba63687e6a0c3e6ad78ffac856569d471cbf218a83cd6761" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.886898 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d1608d80-9530-49ad-9bd2-43d2865e6eeb","Type":"ContainerStarted","Data":"1f72445061c95d041e5b46b09d4c95f484e22275c0dd475668599445ff65584c"} Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.886949 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.900537 4953 generic.go:334] "Generic (PLEG): container finished" podID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerID="e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57" exitCode=0 Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.900574 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerDied","Data":"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57"} Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.900610 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f","Type":"ContainerDied","Data":"278e6888e2cba7b9f01d73181e3eb548a680e36ef6776ecef158f0b845181919"} Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.907273 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.919962 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.933154 4953 scope.go:117] "RemoveContainer" containerID="3743c16c67fa2786e1f462201c375c0ac3bb706382f2e08a63e69461f6395487" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956127 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956180 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956272 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956294 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956338 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gspmx\" (UniqueName: \"kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.956374 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom\") pod \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\" (UID: \"9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f\") " Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.963896 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.004760211 podStartE2EDuration="6.963873431s" podCreationTimestamp="2025-10-11 03:40:37 +0000 UTC" firstStartedPulling="2025-10-11 03:40:37.932631386 +0000 UTC m=+3248.865719020" lastFinishedPulling="2025-10-11 03:40:42.891744596 +0000 UTC m=+3253.824832240" observedRunningTime="2025-10-11 03:40:43.919618518 +0000 UTC m=+3254.852706162" watchObservedRunningTime="2025-10-11 03:40:43.963873431 +0000 UTC m=+3254.896961075" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.964666 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:43 crc kubenswrapper[4953]: E1011 03:40:43.965085 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="manila-scheduler" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965110 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="manila-scheduler" Oct 11 03:40:43 crc kubenswrapper[4953]: E1011 03:40:43.965123 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965131 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: E1011 03:40:43.965167 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="manila-share" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965174 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="manila-share" Oct 11 03:40:43 crc kubenswrapper[4953]: E1011 03:40:43.965186 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965193 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965367 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965383 4953 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="manila-scheduler" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965397 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" containerName="probe" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.965407 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" containerName="manila-share" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.966387 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.968767 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.971504 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts" (OuterVolumeSpecName: "scripts") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.972552 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.973568 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.980890 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:43 crc kubenswrapper[4953]: I1011 03:40:43.985299 4953 scope.go:117] "RemoveContainer" containerID="480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.009806 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx" (OuterVolumeSpecName: "kube-api-access-gspmx") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "kube-api-access-gspmx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.058891 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-scripts\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.058927 4953 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.058940 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gspmx\" (UniqueName: \"kubernetes.io/projected/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-kube-api-access-gspmx\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.058951 4953 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.113662 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.153244 4953 scope.go:117] "RemoveContainer" containerID="e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165312 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtwxp\" (UniqueName: \"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-kube-api-access-mtwxp\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165367 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-scripts\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165394 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165475 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165494 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165545 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165584 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165632 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-ceph\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.165685 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.193142 4953 scope.go:117] "RemoveContainer" containerID="480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce" Oct 11 03:40:44 crc kubenswrapper[4953]: E1011 03:40:44.197025 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce\": container with ID starting with 480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce not found: ID does not exist" containerID="480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.197074 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce"} err="failed to get container status \"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce\": rpc error: code = NotFound desc = could not find container \"480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce\": container with ID starting with 480bc2576681df1c62986d162470654b28a07e1ce492293dc8f331bd782d38ce not found: ID does not exist" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.197104 4953 scope.go:117] "RemoveContainer" containerID="e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.199349 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data" (OuterVolumeSpecName: "config-data") pod "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" (UID: "9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:40:44 crc kubenswrapper[4953]: E1011 03:40:44.200096 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57\": container with ID starting with e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57 not found: ID does not exist" containerID="e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.200134 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57"} err="failed to get container status \"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57\": rpc error: code = NotFound desc = could not find container \"e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57\": container with ID starting with e086b00798c10df6142f47570e51f94084ea0bd6096dd8958948b6570384ab57 not found: ID does not exist" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.267928 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.267972 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268034 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268051 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268076 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268101 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-ceph\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268112 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/4dcedb20-4ac9-4bbf-bece-be0f325d98df-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268124 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtwxp\" (UniqueName: \"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-kube-api-access-mtwxp\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268138 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-scripts\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268162 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.268252 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.272029 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.275239 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.275515 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-scripts\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.276218 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dcedb20-4ac9-4bbf-bece-be0f325d98df-config-data\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.279295 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-ceph\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.291327 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtwxp\" (UniqueName: 
\"kubernetes.io/projected/4dcedb20-4ac9-4bbf-bece-be0f325d98df-kube-api-access-mtwxp\") pod \"manila-share-share1-0\" (UID: \"4dcedb20-4ac9-4bbf-bece-be0f325d98df\") " pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.396349 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.910514 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.912114 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.940291 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.954820 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:44 crc kubenswrapper[4953]: W1011 03:40:44.959252 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4dcedb20_4ac9_4bbf_bece_be0f325d98df.slice/crio-0b916539e7c27f8ce7a375ee0531cd2ff140fc2e82dadb85ccc2625f1b710025 WatchSource:0}: Error finding container 0b916539e7c27f8ce7a375ee0531cd2ff140fc2e82dadb85ccc2625f1b710025: Status 404 returned error can't find the container with id 0b916539e7c27f8ce7a375ee0531cd2ff140fc2e82dadb85ccc2625f1b710025 Oct 11 03:40:44 crc kubenswrapper[4953]: I1011 03:40:44.963089 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:44.995381 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.000940 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.004358 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.011709 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080221 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-scripts\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080351 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080388 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3e55ad88-6f5e-4510-a9c7-25762b58aac0-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080419 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krt7q\" (UniqueName: \"kubernetes.io/projected/3e55ad88-6f5e-4510-a9c7-25762b58aac0-kube-api-access-krt7q\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080495 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.080513 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182279 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182353 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3e55ad88-6f5e-4510-a9c7-25762b58aac0-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182390 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-krt7q\" (UniqueName: \"kubernetes.io/projected/3e55ad88-6f5e-4510-a9c7-25762b58aac0-kube-api-access-krt7q\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182441 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182463 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182526 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-scripts\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.182785 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3e55ad88-6f5e-4510-a9c7-25762b58aac0-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.187555 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.188292 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.188342 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-scripts\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.200242 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e55ad88-6f5e-4510-a9c7-25762b58aac0-config-data\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.202523 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krt7q\" (UniqueName: \"kubernetes.io/projected/3e55ad88-6f5e-4510-a9c7-25762b58aac0-kube-api-access-krt7q\") pod \"manila-scheduler-0\" (UID: \"3e55ad88-6f5e-4510-a9c7-25762b58aac0\") " pod="openstack/manila-scheduler-0" 
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.438272 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0"
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.817031 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f" path="/var/lib/kubelet/pods/9e5a469f-e8bf-4ab4-9754-93c1c9f42e6f/volumes"
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.821655 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3ac8eb4-35e7-4e53-bdde-d7590ae4d001" path="/var/lib/kubelet/pods/f3ac8eb4-35e7-4e53-bdde-d7590ae4d001/volumes"
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.922012 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4dcedb20-4ac9-4bbf-bece-be0f325d98df","Type":"ContainerStarted","Data":"754a806ea4771c6f736df5d9967d59b95b8bf341b77939ab6d53bc3d8513d946"}
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.922059 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4dcedb20-4ac9-4bbf-bece-be0f325d98df","Type":"ContainerStarted","Data":"0b916539e7c27f8ce7a375ee0531cd2ff140fc2e82dadb85ccc2625f1b710025"}
Oct 11 03:40:45 crc kubenswrapper[4953]: I1011 03:40:45.964324 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"]
Oct 11 03:40:45 crc kubenswrapper[4953]: W1011 03:40:45.967230 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e55ad88_6f5e_4510_a9c7_25762b58aac0.slice/crio-31da27d95f9c959c997e0f87fd0d94bbbb3d2b221d70a41cd9f620d99ed6774a WatchSource:0}: Error finding container 31da27d95f9c959c997e0f87fd0d94bbbb3d2b221d70a41cd9f620d99ed6774a: Status 404 returned error can't find the container with id 31da27d95f9c959c997e0f87fd0d94bbbb3d2b221d70a41cd9f620d99ed6774a
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.481083 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0"
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.937202 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"4dcedb20-4ac9-4bbf-bece-be0f325d98df","Type":"ContainerStarted","Data":"c28b5794be37ec85545b9a928864e95b99d688fe467f3cdb68dd2c6b8902aedf"}
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.942884 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"3e55ad88-6f5e-4510-a9c7-25762b58aac0","Type":"ContainerStarted","Data":"d990c8fe1215d9c02e3db7dea43da5476e2b342450f83531a8bc68f9ae808d07"}
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.942927 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"3e55ad88-6f5e-4510-a9c7-25762b58aac0","Type":"ContainerStarted","Data":"3c9863eec0f190f8f4e5572bc6e0f9ce4a7753848f99b45bbe4f4590c4ac098b"}
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.942939 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"3e55ad88-6f5e-4510-a9c7-25762b58aac0","Type":"ContainerStarted","Data":"31da27d95f9c959c997e0f87fd0d94bbbb3d2b221d70a41cd9f620d99ed6774a"}
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.958005 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.957985639 podStartE2EDuration="3.957985639s" podCreationTimestamp="2025-10-11 03:40:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:40:46.956882131 +0000 UTC m=+3257.889969785" watchObservedRunningTime="2025-10-11 03:40:46.957985639 +0000 UTC m=+3257.891073283"
Oct 11 03:40:46 crc kubenswrapper[4953]: I1011 03:40:46.974804 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.9747847309999997 podStartE2EDuration="2.974784731s" podCreationTimestamp="2025-10-11 03:40:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 03:40:46.974613257 +0000 UTC m=+3257.907700911" watchObservedRunningTime="2025-10-11 03:40:46.974784731 +0000 UTC m=+3257.907872375"
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.876281 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597b9665b8-kw4fw"
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.957581 4953 generic.go:334] "Generic (PLEG): container finished" podID="de690dc0-a12a-4321-b838-27a54e039cb1" containerID="f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f" exitCode=137
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.958427 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-597b9665b8-kw4fw"
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.958881 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerDied","Data":"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"}
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.958914 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-597b9665b8-kw4fw" event={"ID":"de690dc0-a12a-4321-b838-27a54e039cb1","Type":"ContainerDied","Data":"5dde06d987d4fdcd610bc1c7f42f57a53b7801a16ee710f3a5023046f69e7e06"}
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.958930 4953 scope.go:117] "RemoveContainer" containerID="9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.962721 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.962766 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.962830 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vv7q\" (UniqueName: \"kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.962847 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.963455 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.963485 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.963502 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts\") pod \"de690dc0-a12a-4321-b838-27a54e039cb1\" (UID: \"de690dc0-a12a-4321-b838-27a54e039cb1\") "
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.965928 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs" (OuterVolumeSpecName: "logs") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.970667 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q" (OuterVolumeSpecName: "kube-api-access-6vv7q") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "kube-api-access-6vv7q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.982967 4953 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de690dc0-a12a-4321-b838-27a54e039cb1-logs\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.982997 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vv7q\" (UniqueName: \"kubernetes.io/projected/de690dc0-a12a-4321-b838-27a54e039cb1-kube-api-access-6vv7q\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.983280 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:40:47 crc kubenswrapper[4953]: I1011 03:40:47.993852 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts" (OuterVolumeSpecName: "scripts") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.000483 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data" (OuterVolumeSpecName: "config-data") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.004552 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.033907 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "de690dc0-a12a-4321-b838-27a54e039cb1" (UID: "de690dc0-a12a-4321-b838-27a54e039cb1"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.084721 4953 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.084764 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.084776 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-config-data\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.084787 4953 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de690dc0-a12a-4321-b838-27a54e039cb1-scripts\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.084800 4953 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de690dc0-a12a-4321-b838-27a54e039cb1-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.158215 4953 scope.go:117] "RemoveContainer" containerID="f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.180175 4953 scope.go:117] "RemoveContainer" containerID="9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"
Oct 11 03:40:48 crc kubenswrapper[4953]: E1011 03:40:48.180637 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6\": container with ID starting with 9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6 not found: ID does not exist" containerID="9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.180703 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6"} err="failed to get container status \"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6\": rpc error: code = NotFound desc = could not find container \"9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6\": container with ID starting with 9441997de4c0eb23e1e77b5f0e99c9a86a4a69c8c85221693041aeacc853a6d6 not found: ID does not exist"
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.180741 4953 scope.go:117] "RemoveContainer" containerID="f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"
Oct 11 03:40:48 crc kubenswrapper[4953]: E1011 03:40:48.181069 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f\": container with ID starting with f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f not found: ID does not exist" containerID="f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.181184 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f"} err="failed to get container status \"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f\": rpc error: code = NotFound desc = could not find container \"f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f\": container with ID starting with f71e36ac03d3cc284b26e4bd8bfe466451dd879240f3a88723f05044f6ba9f8f not found: ID does not exist"
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.295957 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"]
Oct 11 03:40:48 crc kubenswrapper[4953]: I1011 03:40:48.303281 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-597b9665b8-kw4fw"]
Oct 11 03:40:49 crc kubenswrapper[4953]: I1011 03:40:49.812413 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" path="/var/lib/kubelet/pods/de690dc0-a12a-4321-b838-27a54e039cb1/volumes"
Oct 11 03:40:52 crc kubenswrapper[4953]: E1011 03:40:52.716173 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]"
Oct 11 03:40:54 crc kubenswrapper[4953]: I1011 03:40:54.396517 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0"
Oct 11 03:40:55 crc kubenswrapper[4953]: I1011 03:40:55.438803 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0"
Oct 11 03:41:02 crc kubenswrapper[4953]: E1011 03:41:02.981486 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]"
Oct 11 03:41:05 crc kubenswrapper[4953]: I1011 03:41:05.836128 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Oct 11 03:41:06 crc kubenswrapper[4953]: I1011 03:41:06.893156 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0"
Oct 11 03:41:07 crc kubenswrapper[4953]: I1011 03:41:07.468798 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 11 03:41:13 crc kubenswrapper[4953]: E1011 03:41:13.256428 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]"
Oct 11 03:41:23 crc kubenswrapper[4953]: E1011 03:41:23.644465 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6802295_eeec_4abf_85f7_6c909c6bcbc2.slice\": RecentStats: unable to find data in memory cache]"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.613834 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:45 crc kubenswrapper[4953]: E1011 03:41:45.614780 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.614791 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon"
Oct 11 03:41:45 crc kubenswrapper[4953]: E1011 03:41:45.614817 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon-log"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.614824 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon-log"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.615003 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon-log"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.615027 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="de690dc0-a12a-4321-b838-27a54e039cb1" containerName="horizon"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.616294 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.659380 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.665143 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.665284 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.665643 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7fvp\" (UniqueName: \"kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.767183 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7fvp\" (UniqueName: \"kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.767289 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.767369 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.767927 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.768129 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.790076 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7fvp\" (UniqueName: \"kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp\") pod \"community-operators-fgj2x\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") " pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:45 crc kubenswrapper[4953]: I1011 03:41:45.939191 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:46 crc kubenswrapper[4953]: I1011 03:41:46.543182 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:47 crc kubenswrapper[4953]: I1011 03:41:47.555122 4953 generic.go:334] "Generic (PLEG): container finished" podID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerID="b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214" exitCode=0
Oct 11 03:41:47 crc kubenswrapper[4953]: I1011 03:41:47.555167 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerDied","Data":"b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214"}
Oct 11 03:41:47 crc kubenswrapper[4953]: I1011 03:41:47.555403 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerStarted","Data":"7200a88e406ccad7aff998055e02889101b6645e42e59f37807401c40505f7b1"}
Oct 11 03:41:48 crc kubenswrapper[4953]: I1011 03:41:48.566140 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerStarted","Data":"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"}
Oct 11 03:41:49 crc kubenswrapper[4953]: I1011 03:41:49.578881 4953 generic.go:334] "Generic (PLEG): container finished" podID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerID="0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9" exitCode=0
Oct 11 03:41:49 crc kubenswrapper[4953]: I1011 03:41:49.578948 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerDied","Data":"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"}
Oct 11 03:41:50 crc kubenswrapper[4953]: I1011 03:41:50.595366 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerStarted","Data":"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"}
Oct 11 03:41:50 crc kubenswrapper[4953]: I1011 03:41:50.629840 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fgj2x" podStartSLOduration=3.206779839 podStartE2EDuration="5.629811562s" podCreationTimestamp="2025-10-11 03:41:45 +0000 UTC" firstStartedPulling="2025-10-11 03:41:47.556713538 +0000 UTC m=+3318.489801182" lastFinishedPulling="2025-10-11 03:41:49.979745261 +0000 UTC m=+3320.912832905" observedRunningTime="2025-10-11 03:41:50.617056111 +0000 UTC m=+3321.550143765" watchObservedRunningTime="2025-10-11 03:41:50.629811562 +0000 UTC m=+3321.562899226"
Oct 11 03:41:55 crc kubenswrapper[4953]: I1011 03:41:55.940121 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:55 crc kubenswrapper[4953]: I1011 03:41:55.940649 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:55 crc kubenswrapper[4953]: I1011 03:41:55.990749 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:56 crc kubenswrapper[4953]: I1011 03:41:56.691904 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:56 crc kubenswrapper[4953]: I1011 03:41:56.741414 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:58 crc kubenswrapper[4953]: I1011 03:41:58.659634 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fgj2x" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="registry-server" containerID="cri-o://758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9" gracePeriod=2
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.208943 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.287772 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities\") pod \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") "
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.287915 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7fvp\" (UniqueName: \"kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp\") pod \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") "
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.288052 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content\") pod \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\" (UID: \"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9\") "
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.288922 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities" (OuterVolumeSpecName: "utilities") pod "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" (UID: "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.293653 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp" (OuterVolumeSpecName: "kube-api-access-z7fvp") pod "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" (UID: "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9"). InnerVolumeSpecName "kube-api-access-z7fvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.332403 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" (UID: "e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.390294 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.390335 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7fvp\" (UniqueName: \"kubernetes.io/projected/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-kube-api-access-z7fvp\") on node \"crc\" DevicePath \"\""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.390354 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.671542 4953 generic.go:334] "Generic (PLEG): container finished" podID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerID="758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9" exitCode=0
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.671596 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerDied","Data":"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"}
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.671664 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fgj2x" event={"ID":"e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9","Type":"ContainerDied","Data":"7200a88e406ccad7aff998055e02889101b6645e42e59f37807401c40505f7b1"}
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.671690 4953 scope.go:117] "RemoveContainer" containerID="758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.671712 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fgj2x"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.716206 4953 scope.go:117] "RemoveContainer" containerID="0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.718025 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.724764 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fgj2x"]
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.772035 4953 scope.go:117] "RemoveContainer" containerID="b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.799896 4953 scope.go:117] "RemoveContainer" containerID="758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"
Oct 11 03:41:59 crc kubenswrapper[4953]: E1011 03:41:59.800568 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9\": container with ID starting with 758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9 not found: ID does not exist" containerID="758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.800661 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9"} err="failed to get container status \"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9\": rpc error: code = NotFound desc = could not find container \"758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9\": container with ID starting with 758f4f85e751f12975fb816a410fc6532c03f06c2d89f94ae06a63b589cdb6f9 not found: ID does not exist"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.800694 4953 scope.go:117] "RemoveContainer" containerID="0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"
Oct 11 03:41:59 crc kubenswrapper[4953]: E1011 03:41:59.801151 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9\": container with ID starting with 0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9 not found: ID does not exist" containerID="0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.801198 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9"} err="failed to get container status \"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9\": rpc error: code = NotFound desc = could not find container \"0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9\": container with ID starting with 0f6b717ce750321a8c6b732fce2194c001235393535316c99126274efbe8c6b9 not found: ID does not exist"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.801233 4953 scope.go:117] "RemoveContainer" containerID="b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214"
Oct 11 03:41:59 crc kubenswrapper[4953]: E1011 03:41:59.801575 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214\": container with ID starting with b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214 not found: ID does not exist" containerID="b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.801597 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214"} err="failed to get container status \"b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214\": rpc error: code = NotFound desc = could not find container \"b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214\": container with ID starting with b0633b68ddff03ebdb8899ce2d6ef3d7cd3bd8e6fdf482f976c8913465e48214 not found: ID does not exist"
Oct 11 03:41:59 crc kubenswrapper[4953]: I1011 03:41:59.812194 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" path="/var/lib/kubelet/pods/e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9/volumes"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.721400 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 11 03:42:06 crc kubenswrapper[4953]: E1011 03:42:06.722478 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="registry-server"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.722502 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="registry-server"
Oct 11 03:42:06 crc kubenswrapper[4953]: E1011 03:42:06.722553 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="extract-utilities"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.722561 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="extract-utilities"
Oct 11 03:42:06 crc kubenswrapper[4953]: E1011 03:42:06.722574 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="extract-content"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.722582 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="extract-content"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.722876 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4cb8d4b-cc26-48e0-9f8e-1cc073beb3f9" containerName="registry-server"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.723619 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.725730 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-tgzx8"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.725952 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.726216 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.726644 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.777294 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.836994 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837062 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq2q4\" (UniqueName: \"kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837289 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837434 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837593 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837681 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837828 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837909 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.837932 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940148 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940224 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940251 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940276 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940307 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq2q4\" (UniqueName: \"kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940372 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940420 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940474 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.940493 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.941955 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.943070 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.943389 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.944680 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.944965 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.947773 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.950078 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.950667 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.959036 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq2q4\" (UniqueName: \"kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:06 crc kubenswrapper[4953]: I1011 03:42:06.980649 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"tempest-tests-tempest\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " pod="openstack/tempest-tests-tempest"
Oct 11 03:42:07 crc kubenswrapper[4953]: I1011 03:42:07.046095 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Oct 11 03:42:07 crc kubenswrapper[4953]: I1011 03:42:07.524177 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Oct 11 03:42:07 crc kubenswrapper[4953]: I1011 03:42:07.759847 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79","Type":"ContainerStarted","Data":"ef91a5ee40aa7a7a9afcbadc7780474f0598530c106eb7974ecc7ee7cf06fd21"}
Oct 11 03:42:32 crc kubenswrapper[4953]: E1011 03:42:32.979218 4953 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Oct 11 03:42:32 crc kubenswrapper[4953]: E1011 03:42:32.979933 4953 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gq2q4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 11 03:42:32 crc kubenswrapper[4953]: E1011 03:42:32.981136 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"
Oct 11 03:42:33 crc kubenswrapper[4953]: E1011 03:42:33.013441 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"
Oct 11 03:42:41 crc kubenswrapper[4953]: I1011 03:42:41.316637 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 03:42:41 crc kubenswrapper[4953]: I1011 03:42:41.317445 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 03:42:49 crc kubenswrapper[4953]: I1011 03:42:49.312548 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Oct 11 03:42:51 crc kubenswrapper[4953]: I1011 03:42:51.189293 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79","Type":"ContainerStarted","Data":"008487a3d791f2307b2d9d887959598c05221119bb75e50172953295fb77a357"}
Oct 11 03:42:51 crc kubenswrapper[4953]: I1011 03:42:51.217507 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.436367328 podStartE2EDuration="46.217481103s" podCreationTimestamp="2025-10-11 03:42:05 +0000 UTC" firstStartedPulling="2025-10-11 03:42:07.529321182 +0000 UTC m=+3338.462408826" lastFinishedPulling="2025-10-11 03:42:49.310434947 +0000 UTC m=+3380.243522601" observedRunningTime="2025-10-11 03:42:51.206120487 +0000 UTC m=+3382.139208151" watchObservedRunningTime="2025-10-11 03:42:51.217481103 +0000 UTC m=+3382.150568767"
Oct 11 03:43:02 crc kubenswrapper[4953]: I1011 03:43:02.949726 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"]
Oct 11 03:43:02 crc kubenswrapper[4953]: I1011 03:43:02.952770 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:02 crc kubenswrapper[4953]: I1011 03:43:02.961866 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"]
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.023412 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.023527 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9fcw\" (UniqueName: \"kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.023590 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.125358 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.125784 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9fcw\" (UniqueName: \"kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.125833 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.125927 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.126316 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.146115 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9fcw\" (UniqueName: \"kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw\") pod \"certified-operators-b5kwp\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") " pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.307920 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:03 crc kubenswrapper[4953]: I1011 03:43:03.832449 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"]
Oct 11 03:43:03 crc kubenswrapper[4953]: W1011 03:43:03.835774 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f66439a_c4bc_4d40_a8b3_1aa5082fea3a.slice/crio-16d56e93ecaa053811d0813426870f0c04aec7f69de9c826b484473c8b11b490 WatchSource:0}: Error finding container 16d56e93ecaa053811d0813426870f0c04aec7f69de9c826b484473c8b11b490: Status 404 returned error can't find the container with id 16d56e93ecaa053811d0813426870f0c04aec7f69de9c826b484473c8b11b490
Oct 11 03:43:04 crc kubenswrapper[4953]: I1011 03:43:04.308933 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerStarted","Data":"16d56e93ecaa053811d0813426870f0c04aec7f69de9c826b484473c8b11b490"}
Oct 11 03:43:06 crc kubenswrapper[4953]: I1011 03:43:06.340269 4953 generic.go:334] "Generic (PLEG): container finished" podID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerID="c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828" exitCode=0
Oct 11 03:43:06 crc kubenswrapper[4953]: I1011 03:43:06.340311 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerDied","Data":"c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828"}
Oct 11 03:43:08 crc kubenswrapper[4953]: I1011 03:43:08.360350 4953 generic.go:334] "Generic (PLEG): container finished" podID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerID="b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6" exitCode=0
Oct 11 03:43:08 crc kubenswrapper[4953]: I1011 03:43:08.360520 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerDied","Data":"b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6"}
Oct 11 03:43:09 crc kubenswrapper[4953]: I1011 03:43:09.373833 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerStarted","Data":"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca"}
Oct 11 03:43:09 crc kubenswrapper[4953]: I1011 03:43:09.402830 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b5kwp" podStartSLOduration=4.736690737 podStartE2EDuration="7.402812364s" podCreationTimestamp="2025-10-11 03:43:02 +0000 UTC" firstStartedPulling="2025-10-11 03:43:06.344410881 +0000 UTC m=+3397.277498525" lastFinishedPulling="2025-10-11 03:43:09.010532468 +0000 UTC m=+3399.943620152" observedRunningTime="2025-10-11 03:43:09.39786246 +0000 UTC m=+3400.330950184" watchObservedRunningTime="2025-10-11 03:43:09.402812364 +0000 UTC m=+3400.335900008"
Oct 11 03:43:11 crc kubenswrapper[4953]: I1011 03:43:11.316488 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 03:43:11 crc kubenswrapper[4953]: I1011 03:43:11.316835 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 03:43:13 crc kubenswrapper[4953]: I1011 03:43:13.309586 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:13 crc kubenswrapper[4953]: I1011 03:43:13.309653 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:13 crc kubenswrapper[4953]: I1011 03:43:13.380559 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:23 crc kubenswrapper[4953]: I1011 03:43:23.381216 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:23 crc kubenswrapper[4953]: I1011 03:43:23.435678 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"]
Oct 11 03:43:23 crc kubenswrapper[4953]: I1011 03:43:23.512560 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b5kwp" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="registry-server" containerID="cri-o://95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca" gracePeriod=2
Oct 11 03:43:23 crc kubenswrapper[4953]: I1011 03:43:23.981962 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b5kwp"
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.074916 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9fcw\" (UniqueName: \"kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw\") pod \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") "
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.074968 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities\") pod \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") "
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.075135 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content\") pod \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\" (UID: \"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a\") "
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.076177 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities" (OuterVolumeSpecName: "utilities") pod "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" (UID: "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.085906 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw" (OuterVolumeSpecName: "kube-api-access-l9fcw") pod "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" (UID: "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a"). InnerVolumeSpecName "kube-api-access-l9fcw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.119599 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" (UID: "9f66439a-c4bc-4d40-a8b3-1aa5082fea3a"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.177180 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.177217 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9fcw\" (UniqueName: \"kubernetes.io/projected/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-kube-api-access-l9fcw\") on node \"crc\" DevicePath \"\"" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.177229 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.529426 4953 generic.go:334] "Generic (PLEG): container finished" podID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerID="95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca" exitCode=0 Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.529482 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerDied","Data":"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca"} Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.529515 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b5kwp" event={"ID":"9f66439a-c4bc-4d40-a8b3-1aa5082fea3a","Type":"ContainerDied","Data":"16d56e93ecaa053811d0813426870f0c04aec7f69de9c826b484473c8b11b490"} Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.529536 4953 scope.go:117] "RemoveContainer" containerID="95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.529704 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b5kwp" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.563908 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"] Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.564901 4953 scope.go:117] "RemoveContainer" containerID="b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.572189 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b5kwp"] Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.587980 4953 scope.go:117] "RemoveContainer" containerID="c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.638980 4953 scope.go:117] "RemoveContainer" containerID="95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca" Oct 11 03:43:24 crc kubenswrapper[4953]: E1011 03:43:24.639425 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca\": container with ID starting with 95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca not found: ID does not exist" containerID="95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.639468 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca"} err="failed to get container status \"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca\": rpc error: code = NotFound desc = could not find container \"95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca\": container with ID starting with 95a59601716ba64d256f1eae5b2e2a4cffe9e09ecfc0eeb3815fe45ccc9ad6ca not found: ID does not exist" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.639500 4953 scope.go:117] "RemoveContainer" containerID="b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6" Oct 11 03:43:24 crc kubenswrapper[4953]: E1011 03:43:24.640019 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6\": container with ID starting with b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6 not found: ID does not exist" containerID="b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.640064 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6"} err="failed to get container status \"b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6\": rpc error: code = NotFound desc = could not find container \"b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6\": container with ID starting with b707f36e2d417ed889bdbb194778f58e39aa42e8ef6b63f348339637a99207c6 not found: ID does not exist" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.640092 4953 scope.go:117] "RemoveContainer" containerID="c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828" Oct 11 03:43:24 crc kubenswrapper[4953]: E1011 03:43:24.640442 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828\": container with ID starting with c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828 not found: ID does not exist" containerID="c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828" Oct 11 03:43:24 crc kubenswrapper[4953]: I1011 03:43:24.640481 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828"} err="failed to get container status \"c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828\": rpc error: code = NotFound desc = could not find container \"c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828\": container with ID starting with c102d9bb0b963f2a93922aa08521842c689bf0bb9aa07129d9831431e915f828 not found: ID does not exist" Oct 11 03:43:25 crc kubenswrapper[4953]: I1011 03:43:25.812407 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" path="/var/lib/kubelet/pods/9f66439a-c4bc-4d40-a8b3-1aa5082fea3a/volumes" Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.315986 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.316740 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.316816 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.317848 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.317969 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952" gracePeriod=600 Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.680687 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952" exitCode=0 Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.680765 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952"} Oct 11 03:43:41 crc kubenswrapper[4953]: I1011 03:43:41.681001 4953 scope.go:117] "RemoveContainer" containerID="c8e16303c51493df33c32ef54987cda362c9d04072fa986bc1c70fe44dfe0922" Oct 11 03:43:42 crc kubenswrapper[4953]: I1011 03:43:42.692340 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc"} Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.169701 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw"] Oct 11 03:45:00 crc kubenswrapper[4953]: E1011 03:45:00.170613 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="extract-utilities" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.170627 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="extract-utilities" Oct 11 03:45:00 crc kubenswrapper[4953]: E1011 03:45:00.170643 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="extract-content" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.170650 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="extract-content" Oct 11 03:45:00 crc kubenswrapper[4953]: E1011 03:45:00.170659 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="registry-server" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.170664 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="registry-server" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.170845 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f66439a-c4bc-4d40-a8b3-1aa5082fea3a" containerName="registry-server" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.171460 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.173732 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.177324 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.188865 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw"] Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.245041 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.245181 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.245237 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv65x\" (UniqueName: \"kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.346972 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.347089 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.347144 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv65x\" (UniqueName: \"kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.347946 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume\") pod 
\"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.357426 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.362992 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv65x\" (UniqueName: \"kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x\") pod \"collect-profiles-29335905-nf2hw\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.499125 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:00 crc kubenswrapper[4953]: I1011 03:45:00.956654 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw"] Oct 11 03:45:01 crc kubenswrapper[4953]: I1011 03:45:01.461309 4953 generic.go:334] "Generic (PLEG): container finished" podID="3cecbb7f-48f2-4d08-9cc1-20525149105b" containerID="0a81839ca0f1af4b59fe213ba5438f76501fe2dd83b0809521ba3b55b0e2e51c" exitCode=0 Oct 11 03:45:01 crc kubenswrapper[4953]: I1011 03:45:01.461371 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" event={"ID":"3cecbb7f-48f2-4d08-9cc1-20525149105b","Type":"ContainerDied","Data":"0a81839ca0f1af4b59fe213ba5438f76501fe2dd83b0809521ba3b55b0e2e51c"} Oct 11 03:45:01 crc kubenswrapper[4953]: I1011 03:45:01.461583 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" event={"ID":"3cecbb7f-48f2-4d08-9cc1-20525149105b","Type":"ContainerStarted","Data":"e0aa1124c0b979eb30693d06620431414e148f0dae8da4b046d31607726b37e0"} Oct 11 03:45:02 crc kubenswrapper[4953]: I1011 03:45:02.834533 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.000379 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume\") pod \"3cecbb7f-48f2-4d08-9cc1-20525149105b\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.000855 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume\") pod \"3cecbb7f-48f2-4d08-9cc1-20525149105b\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.000911 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tv65x\" (UniqueName: \"kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x\") pod \"3cecbb7f-48f2-4d08-9cc1-20525149105b\" (UID: \"3cecbb7f-48f2-4d08-9cc1-20525149105b\") " Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.001631 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume" (OuterVolumeSpecName: "config-volume") pod "3cecbb7f-48f2-4d08-9cc1-20525149105b" (UID: "3cecbb7f-48f2-4d08-9cc1-20525149105b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.002435 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3cecbb7f-48f2-4d08-9cc1-20525149105b-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.006823 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3cecbb7f-48f2-4d08-9cc1-20525149105b" (UID: "3cecbb7f-48f2-4d08-9cc1-20525149105b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.006889 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x" (OuterVolumeSpecName: "kube-api-access-tv65x") pod "3cecbb7f-48f2-4d08-9cc1-20525149105b" (UID: "3cecbb7f-48f2-4d08-9cc1-20525149105b"). InnerVolumeSpecName "kube-api-access-tv65x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.104308 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3cecbb7f-48f2-4d08-9cc1-20525149105b-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.104348 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tv65x\" (UniqueName: \"kubernetes.io/projected/3cecbb7f-48f2-4d08-9cc1-20525149105b-kube-api-access-tv65x\") on node \"crc\" DevicePath \"\"" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.506002 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" event={"ID":"3cecbb7f-48f2-4d08-9cc1-20525149105b","Type":"ContainerDied","Data":"e0aa1124c0b979eb30693d06620431414e148f0dae8da4b046d31607726b37e0"} Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.506052 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0aa1124c0b979eb30693d06620431414e148f0dae8da4b046d31607726b37e0" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.506118 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335905-nf2hw" Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.909640 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"] Oct 11 03:45:03 crc kubenswrapper[4953]: I1011 03:45:03.925223 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335860-zxgtp"] Oct 11 03:45:05 crc kubenswrapper[4953]: I1011 03:45:05.821097 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b70ea1d6-5c99-4c30-8df7-2baff910aeab" path="/var/lib/kubelet/pods/b70ea1d6-5c99-4c30-8df7-2baff910aeab/volumes" Oct 11 03:45:41 crc kubenswrapper[4953]: I1011 03:45:41.316473 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:45:41 crc kubenswrapper[4953]: I1011 03:45:41.317123 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:45:52 crc kubenswrapper[4953]: I1011 03:45:52.629297 4953 scope.go:117] "RemoveContainer" containerID="e6c796e07f73cbe9408bdd465ec0a1bf7486fd84b3111de90d3494bbba99cea3" Oct 11 03:46:11 crc kubenswrapper[4953]: I1011 03:46:11.316622 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:46:11 crc kubenswrapper[4953]: I1011 03:46:11.317080 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.411080 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:21 crc kubenswrapper[4953]: E1011 03:46:21.412221 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cecbb7f-48f2-4d08-9cc1-20525149105b" containerName="collect-profiles" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.412238 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cecbb7f-48f2-4d08-9cc1-20525149105b" containerName="collect-profiles" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.412470 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cecbb7f-48f2-4d08-9cc1-20525149105b" containerName="collect-profiles" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.414043 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.425767 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.533719 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.533800 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.533826 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv7sv\" (UniqueName: \"kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.635333 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.635427 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.635464 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv7sv\" (UniqueName: 
\"kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.636335 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.636619 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.657474 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv7sv\" (UniqueName: \"kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv\") pod \"redhat-marketplace-qs62g\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:21 crc kubenswrapper[4953]: I1011 03:46:21.765821 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:22 crc kubenswrapper[4953]: I1011 03:46:22.240823 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:23 crc kubenswrapper[4953]: I1011 03:46:23.216737 4953 generic.go:334] "Generic (PLEG): container finished" podID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerID="10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db" exitCode=0 Oct 11 03:46:23 crc kubenswrapper[4953]: I1011 03:46:23.216810 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerDied","Data":"10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db"} Oct 11 03:46:23 crc kubenswrapper[4953]: I1011 03:46:23.217063 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerStarted","Data":"947dcf67b5fec652ea22072aa5432be1dbd5941b761fef21c38fb268086ec2ca"} Oct 11 03:46:23 crc kubenswrapper[4953]: I1011 03:46:23.219163 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:46:24 crc kubenswrapper[4953]: I1011 03:46:24.226800 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerStarted","Data":"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1"} Oct 11 03:46:25 crc kubenswrapper[4953]: I1011 03:46:25.235445 4953 generic.go:334] "Generic (PLEG): container finished" podID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerID="318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1" exitCode=0 Oct 11 03:46:25 crc kubenswrapper[4953]: I1011 03:46:25.235836 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerDied","Data":"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1"} Oct 11 03:46:26 crc kubenswrapper[4953]: I1011 03:46:26.246857 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerStarted","Data":"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1"} Oct 11 03:46:26 crc kubenswrapper[4953]: I1011 03:46:26.267026 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qs62g" podStartSLOduration=2.511050687 podStartE2EDuration="5.26701134s" podCreationTimestamp="2025-10-11 03:46:21 +0000 UTC" firstStartedPulling="2025-10-11 03:46:23.21890544 +0000 UTC m=+3594.151993084" lastFinishedPulling="2025-10-11 03:46:25.974866083 +0000 UTC m=+3596.907953737" observedRunningTime="2025-10-11 03:46:26.260420244 +0000 UTC m=+3597.193507888" watchObservedRunningTime="2025-10-11 03:46:26.26701134 +0000 UTC m=+3597.200098984" Oct 11 03:46:31 crc kubenswrapper[4953]: I1011 03:46:31.766962 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:31 crc kubenswrapper[4953]: I1011 03:46:31.767593 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:31 crc kubenswrapper[4953]: I1011 03:46:31.820033 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:32 crc kubenswrapper[4953]: I1011 03:46:32.354932 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:32 crc kubenswrapper[4953]: I1011 03:46:32.415838 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:34 crc kubenswrapper[4953]: I1011 03:46:34.320764 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qs62g" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="registry-server" containerID="cri-o://b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1" gracePeriod=2 Oct 11 03:46:34 crc kubenswrapper[4953]: I1011 03:46:34.872955 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.022022 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities\") pod \"0fc75965-fef8-4f2c-8eb2-b634be806b87\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.022238 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv7sv\" (UniqueName: \"kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv\") pod \"0fc75965-fef8-4f2c-8eb2-b634be806b87\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.022337 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content\") pod \"0fc75965-fef8-4f2c-8eb2-b634be806b87\" (UID: \"0fc75965-fef8-4f2c-8eb2-b634be806b87\") " Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.023569 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities" (OuterVolumeSpecName: "utilities") pod "0fc75965-fef8-4f2c-8eb2-b634be806b87" (UID: "0fc75965-fef8-4f2c-8eb2-b634be806b87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.037960 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv" (OuterVolumeSpecName: "kube-api-access-wv7sv") pod "0fc75965-fef8-4f2c-8eb2-b634be806b87" (UID: "0fc75965-fef8-4f2c-8eb2-b634be806b87"). InnerVolumeSpecName "kube-api-access-wv7sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.042642 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fc75965-fef8-4f2c-8eb2-b634be806b87" (UID: "0fc75965-fef8-4f2c-8eb2-b634be806b87"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.124988 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.125562 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fc75965-fef8-4f2c-8eb2-b634be806b87-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.125680 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv7sv\" (UniqueName: \"kubernetes.io/projected/0fc75965-fef8-4f2c-8eb2-b634be806b87-kube-api-access-wv7sv\") on node \"crc\" DevicePath \"\"" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.332275 4953 generic.go:334] "Generic (PLEG): container finished" podID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerID="b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1" exitCode=0 Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.332317 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerDied","Data":"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1"} Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.332341 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qs62g" event={"ID":"0fc75965-fef8-4f2c-8eb2-b634be806b87","Type":"ContainerDied","Data":"947dcf67b5fec652ea22072aa5432be1dbd5941b761fef21c38fb268086ec2ca"} Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.332357 4953 scope.go:117] "RemoveContainer" containerID="b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.332453 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qs62g" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.368474 4953 scope.go:117] "RemoveContainer" containerID="318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.378730 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.401140 4953 scope.go:117] "RemoveContainer" containerID="10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.413572 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qs62g"] Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.444312 4953 scope.go:117] "RemoveContainer" containerID="b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: E1011 03:46:35.444909 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1\": container with ID starting with b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1 not found: ID does not exist" containerID="b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.444939 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1"} err="failed to get container status \"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1\": rpc error: code = NotFound desc = could not find container \"b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1\": container with ID starting with b9404e5053e8b7737b68c725672b6bf57bd97c37af4ed83c7ee292fbc56db1a1 not found: ID does not exist" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.444966 4953 scope.go:117] "RemoveContainer" containerID="318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: E1011 03:46:35.445391 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1\": container with ID starting with 318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1 not found: ID does not exist" containerID="318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.445422 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1"} err="failed to get container status \"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1\": rpc error: code = NotFound desc = could not find container \"318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1\": container with ID starting with 318546db94cb454e211e8349dd73b0de9e3fd0b414611f620b13ccc77290e1a1 not found: ID does not exist" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.445439 4953 scope.go:117] "RemoveContainer" containerID="10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db" Oct 11 03:46:35 crc kubenswrapper[4953]: E1011 03:46:35.445733 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db\": container with ID starting with 10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db not found: ID does not exist" containerID="10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.445753 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db"} err="failed to get container status \"10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db\": rpc error: code = NotFound desc = could not find container \"10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db\": container with ID starting with 10326c3b27f43162b93e1c4852eda09e8bf7acea9ae0bc044125edbde5f348db not found: ID does not exist" Oct 11 03:46:35 crc kubenswrapper[4953]: I1011 03:46:35.806885 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" path="/var/lib/kubelet/pods/0fc75965-fef8-4f2c-8eb2-b634be806b87/volumes" Oct 11 03:46:41 crc kubenswrapper[4953]: I1011 03:46:41.316266 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:46:41 crc kubenswrapper[4953]: I1011 03:46:41.316847 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:46:41 crc kubenswrapper[4953]: I1011 03:46:41.316898 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:46:41 crc kubenswrapper[4953]: I1011 03:46:41.317675 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:46:41 crc kubenswrapper[4953]: I1011 03:46:41.317733 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" gracePeriod=600 Oct 11 03:46:41 crc kubenswrapper[4953]: E1011 03:46:41.441723 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:46:42 crc kubenswrapper[4953]: I1011 03:46:42.397033 4953 generic.go:334] 
"Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" exitCode=0 Oct 11 03:46:42 crc kubenswrapper[4953]: I1011 03:46:42.397118 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc"} Oct 11 03:46:42 crc kubenswrapper[4953]: I1011 03:46:42.397518 4953 scope.go:117] "RemoveContainer" containerID="29609757fed816debe2f1f67dd52659345c9175b745f0b4aecb35fdb37e3f952" Oct 11 03:46:42 crc kubenswrapper[4953]: I1011 03:46:42.398262 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:46:42 crc kubenswrapper[4953]: E1011 03:46:42.398545 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:46:51 crc kubenswrapper[4953]: E1011 03:46:51.941559 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Oct 11 03:46:53 crc kubenswrapper[4953]: I1011 03:46:53.795910 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:46:53 crc kubenswrapper[4953]: E1011 03:46:53.797644 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:47:06 crc kubenswrapper[4953]: I1011 03:47:06.795474 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:47:06 crc kubenswrapper[4953]: E1011 03:47:06.796288 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:47:21 crc kubenswrapper[4953]: I1011 03:47:21.796523 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:47:21 crc kubenswrapper[4953]: E1011 03:47:21.797399 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:47:33 crc kubenswrapper[4953]: I1011 03:47:33.799546 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:47:33 crc kubenswrapper[4953]: E1011 03:47:33.800377 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:47:44 crc kubenswrapper[4953]: I1011 03:47:44.795301 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:47:44 crc kubenswrapper[4953]: E1011 03:47:44.796243 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:47:58 crc kubenswrapper[4953]: I1011 03:47:58.795598 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:47:58 crc kubenswrapper[4953]: E1011 03:47:58.796542 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:48:11 crc kubenswrapper[4953]: I1011 03:48:11.795157 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:48:11 crc kubenswrapper[4953]: E1011 03:48:11.796031 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.032075 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:14 crc kubenswrapper[4953]: E1011 03:48:14.033137 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="extract-utilities" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.033152 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="extract-utilities" Oct 11 03:48:14 crc kubenswrapper[4953]: E1011 03:48:14.033165 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" 
containerName="extract-content" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.033171 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="extract-content" Oct 11 03:48:14 crc kubenswrapper[4953]: E1011 03:48:14.033191 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="registry-server" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.033206 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="registry-server" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.033384 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fc75965-fef8-4f2c-8eb2-b634be806b87" containerName="registry-server" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.034707 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.048836 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.170764 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.170823 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l865v\" (UniqueName: \"kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.171049 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.273445 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.273552 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.273571 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l865v\" (UniqueName: \"kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " 
pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.274046 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.274106 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.293534 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l865v\" (UniqueName: \"kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v\") pod \"redhat-operators-ltdz9\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.371846 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:14 crc kubenswrapper[4953]: I1011 03:48:14.851318 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:15 crc kubenswrapper[4953]: I1011 03:48:15.221294 4953 generic.go:334] "Generic (PLEG): container finished" podID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerID="e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581" exitCode=0 Oct 11 03:48:15 crc kubenswrapper[4953]: I1011 03:48:15.221535 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerDied","Data":"e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581"} Oct 11 03:48:15 crc kubenswrapper[4953]: I1011 03:48:15.221565 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerStarted","Data":"f1de14dffede463d02aa372a5fe506c0afdd929067a5e2985ccd883921e5f11e"} Oct 11 03:48:16 crc kubenswrapper[4953]: I1011 03:48:16.236382 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerStarted","Data":"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839"} Oct 11 03:48:17 crc kubenswrapper[4953]: I1011 03:48:17.249921 4953 generic.go:334] "Generic (PLEG): container finished" podID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerID="9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839" exitCode=0 Oct 11 03:48:17 crc kubenswrapper[4953]: I1011 03:48:17.249970 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerDied","Data":"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839"} Oct 11 03:48:18 crc kubenswrapper[4953]: I1011 03:48:18.265462 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" 
event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerStarted","Data":"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee"} Oct 11 03:48:18 crc kubenswrapper[4953]: I1011 03:48:18.290566 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ltdz9" podStartSLOduration=1.716265451 podStartE2EDuration="4.29053862s" podCreationTimestamp="2025-10-11 03:48:14 +0000 UTC" firstStartedPulling="2025-10-11 03:48:15.223544753 +0000 UTC m=+3706.156632407" lastFinishedPulling="2025-10-11 03:48:17.797817932 +0000 UTC m=+3708.730905576" observedRunningTime="2025-10-11 03:48:18.289113514 +0000 UTC m=+3709.222201148" watchObservedRunningTime="2025-10-11 03:48:18.29053862 +0000 UTC m=+3709.223626254" Oct 11 03:48:22 crc kubenswrapper[4953]: I1011 03:48:22.795979 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:48:22 crc kubenswrapper[4953]: E1011 03:48:22.796754 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:48:24 crc kubenswrapper[4953]: I1011 03:48:24.372404 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:24 crc kubenswrapper[4953]: I1011 03:48:24.372840 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:24 crc kubenswrapper[4953]: I1011 03:48:24.439050 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:25 crc kubenswrapper[4953]: I1011 03:48:25.414359 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:25 crc kubenswrapper[4953]: I1011 03:48:25.464998 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:27 crc kubenswrapper[4953]: I1011 03:48:27.368261 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ltdz9" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="registry-server" containerID="cri-o://c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee" gracePeriod=2 Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.043503 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.165176 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content\") pod \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.165291 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l865v\" (UniqueName: \"kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v\") pod \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.165425 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities\") pod \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\" (UID: \"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f\") " Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.166324 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities" (OuterVolumeSpecName: "utilities") pod "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" (UID: "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.187424 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v" (OuterVolumeSpecName: "kube-api-access-l865v") pod "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" (UID: "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f"). InnerVolumeSpecName "kube-api-access-l865v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.259236 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" (UID: "a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.267642 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.267682 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.267697 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l865v\" (UniqueName: \"kubernetes.io/projected/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f-kube-api-access-l865v\") on node \"crc\" DevicePath \"\"" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.377356 4953 generic.go:334] "Generic (PLEG): container finished" podID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerID="c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee" exitCode=0 Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.377398 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerDied","Data":"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee"} Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.377421 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ltdz9" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.377459 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ltdz9" event={"ID":"a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f","Type":"ContainerDied","Data":"f1de14dffede463d02aa372a5fe506c0afdd929067a5e2985ccd883921e5f11e"} Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.377477 4953 scope.go:117] "RemoveContainer" containerID="c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.407280 4953 scope.go:117] "RemoveContainer" containerID="9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.409932 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.421919 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ltdz9"] Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.435165 4953 scope.go:117] "RemoveContainer" containerID="e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.509923 4953 scope.go:117] "RemoveContainer" containerID="c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee" Oct 11 03:48:28 crc kubenswrapper[4953]: E1011 03:48:28.519639 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee\": container with ID starting with c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee not found: ID does not exist" containerID="c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.519698 4953 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee"} err="failed to get container status \"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee\": rpc error: code = NotFound desc = could not find container \"c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee\": container with ID starting with c1f4297397eaee5ffb7e859ba78ff5fcae658ae0f9e76dc6e2b45fdfadcb9cee not found: ID does not exist" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.519724 4953 scope.go:117] "RemoveContainer" containerID="9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839" Oct 11 03:48:28 crc kubenswrapper[4953]: E1011 03:48:28.520200 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839\": container with ID starting with 9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839 not found: ID does not exist" containerID="9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.520244 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839"} err="failed to get container status \"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839\": rpc error: code = NotFound desc = could not find container \"9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839\": container with ID starting with 9da03f0540c1ad0036ee69b0cfb74fc418559072b32ec733427d7c86d6c8b839 not found: ID does not exist" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.520272 4953 scope.go:117] "RemoveContainer" containerID="e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581" Oct 11 03:48:28 crc kubenswrapper[4953]: E1011 03:48:28.520532 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581\": container with ID starting with e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581 not found: ID does not exist" containerID="e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581" Oct 11 03:48:28 crc kubenswrapper[4953]: I1011 03:48:28.520557 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581"} err="failed to get container status \"e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581\": rpc error: code = NotFound desc = could not find container \"e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581\": container with ID starting with e22a90feae53cefdbf174624dca702e16956b13fd160737cf4e1f3327f817581 not found: ID does not exist" Oct 11 03:48:29 crc kubenswrapper[4953]: I1011 03:48:29.806233 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" path="/var/lib/kubelet/pods/a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f/volumes" Oct 11 03:48:33 crc kubenswrapper[4953]: I1011 03:48:33.795253 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:48:33 crc kubenswrapper[4953]: E1011 03:48:33.796932 4953 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:48:48 crc kubenswrapper[4953]: I1011 03:48:48.796050 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:48:48 crc kubenswrapper[4953]: E1011 03:48:48.796849 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:48:59 crc kubenswrapper[4953]: I1011 03:48:59.801123 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:48:59 crc kubenswrapper[4953]: E1011 03:48:59.803738 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:49:13 crc kubenswrapper[4953]: I1011 03:49:13.796333 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:49:13 crc kubenswrapper[4953]: E1011 03:49:13.797107 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:49:28 crc kubenswrapper[4953]: I1011 03:49:28.795032 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:49:28 crc kubenswrapper[4953]: E1011 03:49:28.797085 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:49:42 crc kubenswrapper[4953]: I1011 03:49:42.795696 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:49:42 crc kubenswrapper[4953]: E1011 03:49:42.796378 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:49:44 crc kubenswrapper[4953]: I1011 03:49:44.040965 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-dm7jz"] Oct 11 03:49:44 crc kubenswrapper[4953]: I1011 03:49:44.049858 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-dm7jz"] Oct 11 03:49:45 crc kubenswrapper[4953]: I1011 03:49:45.807921 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3470028-8d0f-4e6a-8c32-684343626ece" path="/var/lib/kubelet/pods/b3470028-8d0f-4e6a-8c32-684343626ece/volumes" Oct 11 03:49:52 crc kubenswrapper[4953]: I1011 03:49:52.813995 4953 scope.go:117] "RemoveContainer" containerID="edc830828377f6faa1c8b84385738117becf7071d2295e580adf7ce6e902e100" Oct 11 03:49:55 crc kubenswrapper[4953]: I1011 03:49:55.045205 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-94a2-account-create-cq9hf"] Oct 11 03:49:55 crc kubenswrapper[4953]: I1011 03:49:55.055732 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-94a2-account-create-cq9hf"] Oct 11 03:49:55 crc kubenswrapper[4953]: I1011 03:49:55.808164 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19a5077f-644f-4bc8-aa46-e2407d412716" path="/var/lib/kubelet/pods/19a5077f-644f-4bc8-aa46-e2407d412716/volumes" Oct 11 03:49:56 crc kubenswrapper[4953]: I1011 03:49:56.797405 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:49:56 crc kubenswrapper[4953]: E1011 03:49:56.799161 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:50:09 crc kubenswrapper[4953]: I1011 03:50:09.801909 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:50:09 crc kubenswrapper[4953]: E1011 03:50:09.802497 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:50:18 crc kubenswrapper[4953]: I1011 03:50:18.039427 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-7z484"] Oct 11 03:50:18 crc kubenswrapper[4953]: I1011 03:50:18.047301 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-7z484"] Oct 11 03:50:19 crc kubenswrapper[4953]: I1011 03:50:19.806329 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9c2b416-67ff-4474-ad70-013c9b0e17d5" path="/var/lib/kubelet/pods/b9c2b416-67ff-4474-ad70-013c9b0e17d5/volumes" Oct 11 03:50:22 crc kubenswrapper[4953]: I1011 03:50:22.796761 4953 scope.go:117] 
"RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:50:22 crc kubenswrapper[4953]: E1011 03:50:22.797557 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:50:33 crc kubenswrapper[4953]: I1011 03:50:33.796588 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:50:33 crc kubenswrapper[4953]: E1011 03:50:33.798317 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:50:48 crc kubenswrapper[4953]: I1011 03:50:48.795919 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:50:48 crc kubenswrapper[4953]: E1011 03:50:48.796771 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:50:52 crc kubenswrapper[4953]: I1011 03:50:52.881813 4953 scope.go:117] "RemoveContainer" containerID="18ab27da3818c7432bf3f7e17e422c062b6e18d406bee81e30eb6b984fb1cc6f" Oct 11 03:50:52 crc kubenswrapper[4953]: I1011 03:50:52.924577 4953 scope.go:117] "RemoveContainer" containerID="1f27c6e9dc9f8f41e4d404a696583f15a3abe106643b1a91cf8a82e276ed0766" Oct 11 03:51:00 crc kubenswrapper[4953]: I1011 03:51:00.796062 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:51:00 crc kubenswrapper[4953]: E1011 03:51:00.797166 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:51:13 crc kubenswrapper[4953]: I1011 03:51:13.801312 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:51:13 crc kubenswrapper[4953]: E1011 03:51:13.804009 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:51:25 crc kubenswrapper[4953]: I1011 03:51:25.795596 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:51:25 crc kubenswrapper[4953]: E1011 03:51:25.796405 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:51:40 crc kubenswrapper[4953]: I1011 03:51:40.795436 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:51:40 crc kubenswrapper[4953]: E1011 03:51:40.796392 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.725925 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:51:50 crc kubenswrapper[4953]: E1011 03:51:50.726960 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="extract-utilities" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.726980 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="extract-utilities" Oct 11 03:51:50 crc kubenswrapper[4953]: E1011 03:51:50.727007 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="registry-server" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.727015 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="registry-server" Oct 11 03:51:50 crc kubenswrapper[4953]: E1011 03:51:50.727038 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="extract-content" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.727047 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="extract-content" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.727302 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8795fb3-8fd1-4f49-b97b-0304ac9d8c4f" containerName="registry-server" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.728977 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.734995 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.908240 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.908331 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:50 crc kubenswrapper[4953]: I1011 03:51:50.908562 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkmf4\" (UniqueName: \"kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.010838 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.010914 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.011051 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkmf4\" (UniqueName: \"kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.011924 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.012167 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.039560 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qkmf4\" (UniqueName: \"kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4\") pod \"community-operators-rz6zs\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.052463 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:51:51 crc kubenswrapper[4953]: I1011 03:51:51.597894 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:51:52 crc kubenswrapper[4953]: I1011 03:51:52.116052 4953 generic.go:334] "Generic (PLEG): container finished" podID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerID="ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c" exitCode=0 Oct 11 03:51:52 crc kubenswrapper[4953]: I1011 03:51:52.116098 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerDied","Data":"ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c"} Oct 11 03:51:52 crc kubenswrapper[4953]: I1011 03:51:52.116389 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerStarted","Data":"d0113019dd9ea8fde23c4195d5962a47bacd022b86817c340c9391af7b475edf"} Oct 11 03:51:52 crc kubenswrapper[4953]: I1011 03:51:52.118694 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:51:52 crc kubenswrapper[4953]: I1011 03:51:52.795208 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:51:53 crc kubenswrapper[4953]: I1011 03:51:53.126671 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090"} Oct 11 03:51:54 crc kubenswrapper[4953]: I1011 03:51:54.136159 4953 generic.go:334] "Generic (PLEG): container finished" podID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerID="80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949" exitCode=0 Oct 11 03:51:54 crc kubenswrapper[4953]: I1011 03:51:54.136320 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerDied","Data":"80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949"} Oct 11 03:51:55 crc kubenswrapper[4953]: I1011 03:51:55.150139 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerStarted","Data":"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260"} Oct 11 03:51:55 crc kubenswrapper[4953]: I1011 03:51:55.181143 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rz6zs" podStartSLOduration=2.6694018379999997 podStartE2EDuration="5.18111667s" podCreationTimestamp="2025-10-11 03:51:50 +0000 UTC" firstStartedPulling="2025-10-11 03:51:52.118485324 +0000 UTC m=+3923.051572968" 
lastFinishedPulling="2025-10-11 03:51:54.630200156 +0000 UTC m=+3925.563287800" observedRunningTime="2025-10-11 03:51:55.174533734 +0000 UTC m=+3926.107621388" watchObservedRunningTime="2025-10-11 03:51:55.18111667 +0000 UTC m=+3926.114204314" Oct 11 03:52:01 crc kubenswrapper[4953]: I1011 03:52:01.054196 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:01 crc kubenswrapper[4953]: I1011 03:52:01.054811 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:01 crc kubenswrapper[4953]: I1011 03:52:01.146098 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:01 crc kubenswrapper[4953]: I1011 03:52:01.255394 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:01 crc kubenswrapper[4953]: I1011 03:52:01.395071 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.224835 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rz6zs" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="registry-server" containerID="cri-o://c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260" gracePeriod=2 Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.879823 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.973541 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities\") pod \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.973691 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkmf4\" (UniqueName: \"kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4\") pod \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.973747 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content\") pod \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\" (UID: \"d468dc61-7f1a-4241-b5eb-4d938f15b0dd\") " Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.974201 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities" (OuterVolumeSpecName: "utilities") pod "d468dc61-7f1a-4241-b5eb-4d938f15b0dd" (UID: "d468dc61-7f1a-4241-b5eb-4d938f15b0dd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.974327 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:52:03 crc kubenswrapper[4953]: I1011 03:52:03.980688 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4" (OuterVolumeSpecName: "kube-api-access-qkmf4") pod "d468dc61-7f1a-4241-b5eb-4d938f15b0dd" (UID: "d468dc61-7f1a-4241-b5eb-4d938f15b0dd"). InnerVolumeSpecName "kube-api-access-qkmf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.029703 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d468dc61-7f1a-4241-b5eb-4d938f15b0dd" (UID: "d468dc61-7f1a-4241-b5eb-4d938f15b0dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.076061 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkmf4\" (UniqueName: \"kubernetes.io/projected/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-kube-api-access-qkmf4\") on node \"crc\" DevicePath \"\"" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.076093 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d468dc61-7f1a-4241-b5eb-4d938f15b0dd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.234749 4953 generic.go:334] "Generic (PLEG): container finished" podID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerID="c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260" exitCode=0 Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.234800 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerDied","Data":"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260"} Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.234835 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rz6zs" event={"ID":"d468dc61-7f1a-4241-b5eb-4d938f15b0dd","Type":"ContainerDied","Data":"d0113019dd9ea8fde23c4195d5962a47bacd022b86817c340c9391af7b475edf"} Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.234856 4953 scope.go:117] "RemoveContainer" containerID="c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.234988 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rz6zs" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.267825 4953 scope.go:117] "RemoveContainer" containerID="80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.269711 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.279953 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rz6zs"] Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.287999 4953 scope.go:117] "RemoveContainer" containerID="ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.331222 4953 scope.go:117] "RemoveContainer" containerID="c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260" Oct 11 03:52:04 crc kubenswrapper[4953]: E1011 03:52:04.331709 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260\": container with ID starting with c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260 not found: ID does not exist" containerID="c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.331738 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260"} err="failed to get container status \"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260\": rpc error: code = NotFound desc = could not find container \"c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260\": container with ID starting with c40d2cc9e516ee5a7444af2d6e8fad084acfee28c61a9c2dadccf6af8191b260 not found: ID does not exist" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.331759 4953 scope.go:117] "RemoveContainer" containerID="80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949" Oct 11 03:52:04 crc kubenswrapper[4953]: E1011 03:52:04.332082 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949\": container with ID starting with 80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949 not found: ID does not exist" containerID="80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.332101 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949"} err="failed to get container status \"80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949\": rpc error: code = NotFound desc = could not find container \"80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949\": container with ID starting with 80f1634a298d34cfe0ffda239b2037819617954c568ffb15bfe884c90f93b949 not found: ID does not exist" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.332113 4953 scope.go:117] "RemoveContainer" containerID="ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c" Oct 11 03:52:04 crc kubenswrapper[4953]: E1011 03:52:04.332346 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c\": container with ID starting with ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c not found: ID does not exist" containerID="ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c" Oct 11 03:52:04 crc kubenswrapper[4953]: I1011 03:52:04.332372 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c"} err="failed to get container status \"ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c\": rpc error: code = NotFound desc = could not find container \"ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c\": container with ID starting with ad57292c06c18135d9484ff1d8136ba3539f0840267658eaa8ba51d7a9f6202c not found: ID does not exist" Oct 11 03:52:05 crc kubenswrapper[4953]: I1011 03:52:05.808172 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" path="/var/lib/kubelet/pods/d468dc61-7f1a-4241-b5eb-4d938f15b0dd/volumes" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.775220 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:13 crc kubenswrapper[4953]: E1011 03:53:13.776371 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="extract-utilities" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.776388 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="extract-utilities" Oct 11 03:53:13 crc kubenswrapper[4953]: E1011 03:53:13.776416 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="registry-server" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.776424 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="registry-server" Oct 11 03:53:13 crc kubenswrapper[4953]: E1011 03:53:13.776444 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="extract-content" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.776454 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="extract-content" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.776747 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="d468dc61-7f1a-4241-b5eb-4d938f15b0dd" containerName="registry-server" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.778345 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.810375 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.868418 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.868466 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.868498 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtsx2\" (UniqueName: \"kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.970684 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.971233 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.972454 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.972553 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtsx2\" (UniqueName: \"kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.972802 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:13 crc kubenswrapper[4953]: I1011 03:53:13.992448 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wtsx2\" (UniqueName: \"kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2\") pod \"certified-operators-zthj7\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:14 crc kubenswrapper[4953]: I1011 03:53:14.099755 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:14 crc kubenswrapper[4953]: I1011 03:53:14.611588 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:14 crc kubenswrapper[4953]: I1011 03:53:14.885531 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerStarted","Data":"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e"} Oct 11 03:53:14 crc kubenswrapper[4953]: I1011 03:53:14.885934 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerStarted","Data":"22e19b26db205018d896de97f5d4009211dcef00b8dab5111aa9dec3c81fc195"} Oct 11 03:53:15 crc kubenswrapper[4953]: I1011 03:53:15.898594 4953 generic.go:334] "Generic (PLEG): container finished" podID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerID="81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e" exitCode=0 Oct 11 03:53:15 crc kubenswrapper[4953]: I1011 03:53:15.898639 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerDied","Data":"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e"} Oct 11 03:53:21 crc kubenswrapper[4953]: I1011 03:53:21.955670 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerStarted","Data":"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b"} Oct 11 03:53:22 crc kubenswrapper[4953]: I1011 03:53:22.964720 4953 generic.go:334] "Generic (PLEG): container finished" podID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerID="01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b" exitCode=0 Oct 11 03:53:22 crc kubenswrapper[4953]: I1011 03:53:22.964778 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerDied","Data":"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b"} Oct 11 03:53:23 crc kubenswrapper[4953]: I1011 03:53:23.996096 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerStarted","Data":"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520"} Oct 11 03:53:24 crc kubenswrapper[4953]: I1011 03:53:24.028170 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zthj7" podStartSLOduration=3.490161533 podStartE2EDuration="11.028151523s" podCreationTimestamp="2025-10-11 03:53:13 +0000 UTC" firstStartedPulling="2025-10-11 03:53:15.901593471 +0000 UTC m=+4006.834681125" lastFinishedPulling="2025-10-11 
03:53:23.439583461 +0000 UTC m=+4014.372671115" observedRunningTime="2025-10-11 03:53:24.017285349 +0000 UTC m=+4014.950373003" watchObservedRunningTime="2025-10-11 03:53:24.028151523 +0000 UTC m=+4014.961239167" Oct 11 03:53:24 crc kubenswrapper[4953]: I1011 03:53:24.100541 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:24 crc kubenswrapper[4953]: I1011 03:53:24.101021 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:25 crc kubenswrapper[4953]: I1011 03:53:25.142285 4953 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-zthj7" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="registry-server" probeResult="failure" output=< Oct 11 03:53:25 crc kubenswrapper[4953]: timeout: failed to connect service ":50051" within 1s Oct 11 03:53:25 crc kubenswrapper[4953]: > Oct 11 03:53:34 crc kubenswrapper[4953]: I1011 03:53:34.149404 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:34 crc kubenswrapper[4953]: I1011 03:53:34.201859 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:34 crc kubenswrapper[4953]: I1011 03:53:34.386779 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.098392 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zthj7" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="registry-server" containerID="cri-o://6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520" gracePeriod=2 Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.861107 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.938653 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content\") pod \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.938738 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtsx2\" (UniqueName: \"kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2\") pod \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.938863 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities\") pod \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\" (UID: \"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e\") " Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.939943 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities" (OuterVolumeSpecName: "utilities") pod "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" (UID: "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.945029 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2" (OuterVolumeSpecName: "kube-api-access-wtsx2") pod "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" (UID: "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e"). InnerVolumeSpecName "kube-api-access-wtsx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:53:36 crc kubenswrapper[4953]: I1011 03:53:36.991399 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" (UID: "428b7ac6-fb15-43c0-a30b-2ef2cff1d97e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.040791 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.040824 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtsx2\" (UniqueName: \"kubernetes.io/projected/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-kube-api-access-wtsx2\") on node \"crc\" DevicePath \"\"" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.040835 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.108086 4953 generic.go:334] "Generic (PLEG): container finished" podID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerID="6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520" exitCode=0 Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.108128 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerDied","Data":"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520"} Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.108159 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zthj7" event={"ID":"428b7ac6-fb15-43c0-a30b-2ef2cff1d97e","Type":"ContainerDied","Data":"22e19b26db205018d896de97f5d4009211dcef00b8dab5111aa9dec3c81fc195"} Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.108176 4953 scope.go:117] "RemoveContainer" containerID="6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.108188 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zthj7" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.141729 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.141794 4953 scope.go:117] "RemoveContainer" containerID="01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.150120 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zthj7"] Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.165591 4953 scope.go:117] "RemoveContainer" containerID="81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.210807 4953 scope.go:117] "RemoveContainer" containerID="6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520" Oct 11 03:53:37 crc kubenswrapper[4953]: E1011 03:53:37.211545 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520\": container with ID starting with 6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520 not found: ID does not exist" containerID="6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.211579 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520"} err="failed to get container status \"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520\": rpc error: code = NotFound desc = could not find container \"6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520\": container with ID starting with 6c28ea9bf1869732dd7e3d6a3803ec9b7d184027b2d38926f58b3bc7ebf41520 not found: ID does not exist" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.211612 4953 scope.go:117] "RemoveContainer" containerID="01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b" Oct 11 03:53:37 crc kubenswrapper[4953]: E1011 03:53:37.211980 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b\": container with ID starting with 01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b not found: ID does not exist" containerID="01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.212031 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b"} err="failed to get container status \"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b\": rpc error: code = NotFound desc = could not find container \"01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b\": container with ID starting with 01100cfbf25b00a52e1fd0a07252e042ebffecacf07eb0b5a6d5b6cbfb92589b not found: ID does not exist" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.212059 4953 scope.go:117] "RemoveContainer" containerID="81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e" Oct 11 03:53:37 crc kubenswrapper[4953]: E1011 03:53:37.212467 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e\": container with ID starting with 81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e not found: ID does not exist" containerID="81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.212529 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e"} err="failed to get container status \"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e\": rpc error: code = NotFound desc = could not find container \"81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e\": container with ID starting with 81b5482e41a92e15cb2993c9a3611a638e0ff9200515125a5194bc1c37579d7e not found: ID does not exist" Oct 11 03:53:37 crc kubenswrapper[4953]: I1011 03:53:37.806064 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" path="/var/lib/kubelet/pods/428b7ac6-fb15-43c0-a30b-2ef2cff1d97e/volumes" Oct 11 03:54:11 crc kubenswrapper[4953]: I1011 03:54:11.316497 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:54:11 crc kubenswrapper[4953]: I1011 03:54:11.317079 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:54:41 crc kubenswrapper[4953]: I1011 03:54:41.316760 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:54:41 crc kubenswrapper[4953]: I1011 03:54:41.317297 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:55:11 crc kubenswrapper[4953]: I1011 03:55:11.316692 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:55:11 crc kubenswrapper[4953]: I1011 03:55:11.317142 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:55:11 crc kubenswrapper[4953]: I1011 03:55:11.317184 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:55:11 crc kubenswrapper[4953]: I1011 03:55:11.318035 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:55:11 crc kubenswrapper[4953]: I1011 03:55:11.318090 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090" gracePeriod=600 Oct 11 03:55:12 crc kubenswrapper[4953]: I1011 03:55:12.040255 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090" exitCode=0 Oct 11 03:55:12 crc kubenswrapper[4953]: I1011 03:55:12.040303 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090"} Oct 11 03:55:12 crc kubenswrapper[4953]: I1011 03:55:12.041077 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be"} Oct 11 03:55:12 crc kubenswrapper[4953]: I1011 03:55:12.041103 4953 scope.go:117] "RemoveContainer" containerID="cddae0ecf2ca6703f852c2e204a9e983a0576b317e4d6b522b036a5cf4c770cc" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.356390 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:22 crc kubenswrapper[4953]: E1011 03:56:22.357289 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="extract-utilities" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.357303 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="extract-utilities" Oct 11 03:56:22 crc kubenswrapper[4953]: E1011 03:56:22.357330 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="extract-content" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.357336 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="extract-content" Oct 11 03:56:22 crc kubenswrapper[4953]: E1011 03:56:22.357348 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="registry-server" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.357354 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="registry-server" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.357524 4953 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="428b7ac6-fb15-43c0-a30b-2ef2cff1d97e" containerName="registry-server" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.362321 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.367182 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.452996 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxfvd\" (UniqueName: \"kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.453528 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.453595 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.555964 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.556021 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.556121 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxfvd\" (UniqueName: \"kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.556594 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.556817 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content\") pod \"redhat-marketplace-8cl5s\" (UID: 
\"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.586882 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxfvd\" (UniqueName: \"kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd\") pod \"redhat-marketplace-8cl5s\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:22 crc kubenswrapper[4953]: I1011 03:56:22.685472 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:23 crc kubenswrapper[4953]: I1011 03:56:23.122411 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:23 crc kubenswrapper[4953]: I1011 03:56:23.703108 4953 generic.go:334] "Generic (PLEG): container finished" podID="f9eec1af-1c53-4224-930b-21e249037acd" containerID="17fd24b5071c02bfe5376d16668ed7ab84f7fbfa0f6ce39608a58dd0451df093" exitCode=0 Oct 11 03:56:23 crc kubenswrapper[4953]: I1011 03:56:23.703191 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerDied","Data":"17fd24b5071c02bfe5376d16668ed7ab84f7fbfa0f6ce39608a58dd0451df093"} Oct 11 03:56:23 crc kubenswrapper[4953]: I1011 03:56:23.703637 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerStarted","Data":"9c65bf19bcb543f1264370d772fade3b25466812a1ad0a730a7dddf8d81f1b8d"} Oct 11 03:56:25 crc kubenswrapper[4953]: I1011 03:56:25.727472 4953 generic.go:334] "Generic (PLEG): container finished" podID="f9eec1af-1c53-4224-930b-21e249037acd" containerID="b195db40fcbb8f049a831bc55e63f31fd26074d5131d19f2ef0d3863980a31f2" exitCode=0 Oct 11 03:56:25 crc kubenswrapper[4953]: I1011 03:56:25.727674 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerDied","Data":"b195db40fcbb8f049a831bc55e63f31fd26074d5131d19f2ef0d3863980a31f2"} Oct 11 03:56:26 crc kubenswrapper[4953]: I1011 03:56:26.739176 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerStarted","Data":"84fc6a3b2fc191de23ae0639a76de48ef7dcda88676ca9ea3ebe550930c53680"} Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.685944 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.686431 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.729271 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.752325 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8cl5s" podStartSLOduration=8.241901788 podStartE2EDuration="10.752298792s" podCreationTimestamp="2025-10-11 03:56:22 +0000 UTC" 
firstStartedPulling="2025-10-11 03:56:23.705284596 +0000 UTC m=+4194.638372230" lastFinishedPulling="2025-10-11 03:56:26.2156816 +0000 UTC m=+4197.148769234" observedRunningTime="2025-10-11 03:56:26.757301177 +0000 UTC m=+4197.690388811" watchObservedRunningTime="2025-10-11 03:56:32.752298792 +0000 UTC m=+4203.685386436" Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.888095 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:32 crc kubenswrapper[4953]: I1011 03:56:32.966031 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:34 crc kubenswrapper[4953]: I1011 03:56:34.857913 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8cl5s" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="registry-server" containerID="cri-o://84fc6a3b2fc191de23ae0639a76de48ef7dcda88676ca9ea3ebe550930c53680" gracePeriod=2 Oct 11 03:56:35 crc kubenswrapper[4953]: I1011 03:56:35.872037 4953 generic.go:334] "Generic (PLEG): container finished" podID="f9eec1af-1c53-4224-930b-21e249037acd" containerID="84fc6a3b2fc191de23ae0639a76de48ef7dcda88676ca9ea3ebe550930c53680" exitCode=0 Oct 11 03:56:35 crc kubenswrapper[4953]: I1011 03:56:35.872133 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerDied","Data":"84fc6a3b2fc191de23ae0639a76de48ef7dcda88676ca9ea3ebe550930c53680"} Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.086797 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.253656 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities\") pod \"f9eec1af-1c53-4224-930b-21e249037acd\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.253734 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxfvd\" (UniqueName: \"kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd\") pod \"f9eec1af-1c53-4224-930b-21e249037acd\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.254887 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content\") pod \"f9eec1af-1c53-4224-930b-21e249037acd\" (UID: \"f9eec1af-1c53-4224-930b-21e249037acd\") " Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.255322 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities" (OuterVolumeSpecName: "utilities") pod "f9eec1af-1c53-4224-930b-21e249037acd" (UID: "f9eec1af-1c53-4224-930b-21e249037acd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.255974 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.265931 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd" (OuterVolumeSpecName: "kube-api-access-bxfvd") pod "f9eec1af-1c53-4224-930b-21e249037acd" (UID: "f9eec1af-1c53-4224-930b-21e249037acd"). InnerVolumeSpecName "kube-api-access-bxfvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.268416 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9eec1af-1c53-4224-930b-21e249037acd" (UID: "f9eec1af-1c53-4224-930b-21e249037acd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.357512 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxfvd\" (UniqueName: \"kubernetes.io/projected/f9eec1af-1c53-4224-930b-21e249037acd-kube-api-access-bxfvd\") on node \"crc\" DevicePath \"\"" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.357544 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9eec1af-1c53-4224-930b-21e249037acd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.885783 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8cl5s" event={"ID":"f9eec1af-1c53-4224-930b-21e249037acd","Type":"ContainerDied","Data":"9c65bf19bcb543f1264370d772fade3b25466812a1ad0a730a7dddf8d81f1b8d"} Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.886724 4953 scope.go:117] "RemoveContainer" containerID="84fc6a3b2fc191de23ae0639a76de48ef7dcda88676ca9ea3ebe550930c53680" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.885884 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8cl5s" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.923253 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.923596 4953 scope.go:117] "RemoveContainer" containerID="b195db40fcbb8f049a831bc55e63f31fd26074d5131d19f2ef0d3863980a31f2" Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.930974 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8cl5s"] Oct 11 03:56:36 crc kubenswrapper[4953]: I1011 03:56:36.955000 4953 scope.go:117] "RemoveContainer" containerID="17fd24b5071c02bfe5376d16668ed7ab84f7fbfa0f6ce39608a58dd0451df093" Oct 11 03:56:37 crc kubenswrapper[4953]: I1011 03:56:37.806974 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9eec1af-1c53-4224-930b-21e249037acd" path="/var/lib/kubelet/pods/f9eec1af-1c53-4224-930b-21e249037acd/volumes" Oct 11 03:57:11 crc kubenswrapper[4953]: I1011 03:57:11.316986 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:57:11 crc kubenswrapper[4953]: I1011 03:57:11.317537 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:57:41 crc kubenswrapper[4953]: I1011 03:57:41.317163 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:57:41 crc kubenswrapper[4953]: I1011 03:57:41.318246 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.316581 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.317188 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.317266 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.318330 4953 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.318421 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" gracePeriod=600 Oct 11 03:58:11 crc kubenswrapper[4953]: E1011 03:58:11.453111 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.870445 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" exitCode=0 Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.870489 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be"} Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.870543 4953 scope.go:117] "RemoveContainer" containerID="8c81bf0695abb2418d6c1a90a17daf9013b1607bf2ff85794c178b8ee34be090" Oct 11 03:58:11 crc kubenswrapper[4953]: I1011 03:58:11.871259 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:58:11 crc kubenswrapper[4953]: E1011 03:58:11.871538 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.277169 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:21 crc kubenswrapper[4953]: E1011 03:58:21.278381 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="extract-content" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.278402 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="extract-content" Oct 11 03:58:21 crc kubenswrapper[4953]: E1011 03:58:21.278453 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="extract-utilities" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.278466 4953 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="extract-utilities" Oct 11 03:58:21 crc kubenswrapper[4953]: E1011 03:58:21.278491 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="registry-server" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.278506 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="registry-server" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.278814 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9eec1af-1c53-4224-930b-21e249037acd" containerName="registry-server" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.281294 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.292045 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.331371 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.331516 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww8ln\" (UniqueName: \"kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.331588 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.434118 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.434333 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.434468 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww8ln\" (UniqueName: \"kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.434583 4953 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.435000 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.457620 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww8ln\" (UniqueName: \"kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln\") pod \"redhat-operators-shlgk\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:21 crc kubenswrapper[4953]: I1011 03:58:21.610435 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.090974 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.795190 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:58:22 crc kubenswrapper[4953]: E1011 03:58:22.795860 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.989130 4953 generic.go:334] "Generic (PLEG): container finished" podID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerID="22f1fb4e397a527ee717b9c1f406336550634afa9cd2383c663a68b25efdc26d" exitCode=0 Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.989189 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerDied","Data":"22f1fb4e397a527ee717b9c1f406336550634afa9cd2383c663a68b25efdc26d"} Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.989843 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerStarted","Data":"ad11f501318232ff64af958979aafc17825683665722de6f6b867f005469961c"} Oct 11 03:58:22 crc kubenswrapper[4953]: I1011 03:58:22.994631 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 03:58:23 crc kubenswrapper[4953]: I1011 03:58:23.999374 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerStarted","Data":"a6c000c2254d4d295200fa75a3ddccdbcef43b1853f9fdc4cf056ec3b52b1913"} Oct 11 03:58:25 crc kubenswrapper[4953]: I1011 
03:58:25.021519 4953 generic.go:334] "Generic (PLEG): container finished" podID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerID="a6c000c2254d4d295200fa75a3ddccdbcef43b1853f9fdc4cf056ec3b52b1913" exitCode=0 Oct 11 03:58:25 crc kubenswrapper[4953]: I1011 03:58:25.021813 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerDied","Data":"a6c000c2254d4d295200fa75a3ddccdbcef43b1853f9fdc4cf056ec3b52b1913"} Oct 11 03:58:26 crc kubenswrapper[4953]: I1011 03:58:26.032734 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerStarted","Data":"9966e8616500cf424aa4f5a26df6c0d3e6c484a5061cebc61fa3227b76fe5da4"} Oct 11 03:58:26 crc kubenswrapper[4953]: I1011 03:58:26.053361 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-shlgk" podStartSLOduration=2.389487546 podStartE2EDuration="5.053341752s" podCreationTimestamp="2025-10-11 03:58:21 +0000 UTC" firstStartedPulling="2025-10-11 03:58:22.994402127 +0000 UTC m=+4313.927489771" lastFinishedPulling="2025-10-11 03:58:25.658256333 +0000 UTC m=+4316.591343977" observedRunningTime="2025-10-11 03:58:26.051471145 +0000 UTC m=+4316.984558819" watchObservedRunningTime="2025-10-11 03:58:26.053341752 +0000 UTC m=+4316.986429386" Oct 11 03:58:31 crc kubenswrapper[4953]: I1011 03:58:31.612261 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:31 crc kubenswrapper[4953]: I1011 03:58:31.612767 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:31 crc kubenswrapper[4953]: I1011 03:58:31.655982 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:32 crc kubenswrapper[4953]: I1011 03:58:32.203054 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:32 crc kubenswrapper[4953]: I1011 03:58:32.249571 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:34 crc kubenswrapper[4953]: I1011 03:58:34.096049 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-shlgk" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="registry-server" containerID="cri-o://9966e8616500cf424aa4f5a26df6c0d3e6c484a5061cebc61fa3227b76fe5da4" gracePeriod=2 Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.115109 4953 generic.go:334] "Generic (PLEG): container finished" podID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerID="9966e8616500cf424aa4f5a26df6c0d3e6c484a5061cebc61fa3227b76fe5da4" exitCode=0 Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.115240 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerDied","Data":"9966e8616500cf424aa4f5a26df6c0d3e6c484a5061cebc61fa3227b76fe5da4"} Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.116213 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shlgk" 
event={"ID":"cbb0f458-0ee7-48ca-9927-c4be0766641f","Type":"ContainerDied","Data":"ad11f501318232ff64af958979aafc17825683665722de6f6b867f005469961c"} Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.116240 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad11f501318232ff64af958979aafc17825683665722de6f6b867f005469961c" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.160216 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.268585 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content\") pod \"cbb0f458-0ee7-48ca-9927-c4be0766641f\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.268830 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww8ln\" (UniqueName: \"kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln\") pod \"cbb0f458-0ee7-48ca-9927-c4be0766641f\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.268886 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities\") pod \"cbb0f458-0ee7-48ca-9927-c4be0766641f\" (UID: \"cbb0f458-0ee7-48ca-9927-c4be0766641f\") " Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.269791 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities" (OuterVolumeSpecName: "utilities") pod "cbb0f458-0ee7-48ca-9927-c4be0766641f" (UID: "cbb0f458-0ee7-48ca-9927-c4be0766641f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.276831 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln" (OuterVolumeSpecName: "kube-api-access-ww8ln") pod "cbb0f458-0ee7-48ca-9927-c4be0766641f" (UID: "cbb0f458-0ee7-48ca-9927-c4be0766641f"). InnerVolumeSpecName "kube-api-access-ww8ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.364563 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cbb0f458-0ee7-48ca-9927-c4be0766641f" (UID: "cbb0f458-0ee7-48ca-9927-c4be0766641f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.371393 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.371426 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbb0f458-0ee7-48ca-9927-c4be0766641f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 03:58:36 crc kubenswrapper[4953]: I1011 03:58:36.371437 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww8ln\" (UniqueName: \"kubernetes.io/projected/cbb0f458-0ee7-48ca-9927-c4be0766641f-kube-api-access-ww8ln\") on node \"crc\" DevicePath \"\"" Oct 11 03:58:37 crc kubenswrapper[4953]: I1011 03:58:37.122891 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shlgk" Oct 11 03:58:37 crc kubenswrapper[4953]: I1011 03:58:37.155352 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:37 crc kubenswrapper[4953]: I1011 03:58:37.168328 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-shlgk"] Oct 11 03:58:37 crc kubenswrapper[4953]: I1011 03:58:37.795969 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:58:37 crc kubenswrapper[4953]: E1011 03:58:37.797045 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:58:37 crc kubenswrapper[4953]: I1011 03:58:37.809278 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" path="/var/lib/kubelet/pods/cbb0f458-0ee7-48ca-9927-c4be0766641f/volumes" Oct 11 03:58:50 crc kubenswrapper[4953]: I1011 03:58:50.795765 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:58:50 crc kubenswrapper[4953]: E1011 03:58:50.796774 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:59:05 crc kubenswrapper[4953]: I1011 03:59:05.801403 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:59:05 crc kubenswrapper[4953]: E1011 03:59:05.803059 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:59:20 crc kubenswrapper[4953]: I1011 03:59:20.796035 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:59:20 crc kubenswrapper[4953]: E1011 03:59:20.797280 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:59:34 crc kubenswrapper[4953]: I1011 03:59:34.796241 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:59:34 crc kubenswrapper[4953]: E1011 03:59:34.798276 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:59:46 crc kubenswrapper[4953]: I1011 03:59:46.796225 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:59:46 crc kubenswrapper[4953]: E1011 03:59:46.797275 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 03:59:59 crc kubenswrapper[4953]: I1011 03:59:59.801169 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 03:59:59 crc kubenswrapper[4953]: E1011 03:59:59.802220 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.151858 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx"] Oct 11 04:00:00 crc kubenswrapper[4953]: E1011 04:00:00.152297 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="extract-content" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.152317 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="extract-content" Oct 11 04:00:00 crc kubenswrapper[4953]: E1011 04:00:00.152346 4953 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="extract-utilities" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.152353 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="extract-utilities" Oct 11 04:00:00 crc kubenswrapper[4953]: E1011 04:00:00.152375 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="registry-server" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.152382 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="registry-server" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.152562 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbb0f458-0ee7-48ca-9927-c4be0766641f" containerName="registry-server" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.153224 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.155222 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.155232 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.186170 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx"] Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.255787 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.255842 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rkkj\" (UniqueName: \"kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.256356 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.360641 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.360731 4953 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.360776 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rkkj\" (UniqueName: \"kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.361810 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.376520 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.376743 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rkkj\" (UniqueName: \"kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj\") pod \"collect-profiles-29335920-825wx\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.482281 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:00 crc kubenswrapper[4953]: I1011 04:00:00.911158 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx"] Oct 11 04:00:01 crc kubenswrapper[4953]: I1011 04:00:01.921104 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" event={"ID":"e03d4745-93cb-4845-b4f8-b2ba75236d6d","Type":"ContainerStarted","Data":"1ade697a7b6b6676514fbb1c53dc77a2e729579562a5241ae3237301528256e6"} Oct 11 04:00:01 crc kubenswrapper[4953]: I1011 04:00:01.921726 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" event={"ID":"e03d4745-93cb-4845-b4f8-b2ba75236d6d","Type":"ContainerStarted","Data":"6f5fd8790215cddc443b8b2f4fb1a10da04f4f9fceea5408f5cfe782edbcfab6"} Oct 11 04:00:01 crc kubenswrapper[4953]: I1011 04:00:01.940369 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" podStartSLOduration=1.940350042 podStartE2EDuration="1.940350042s" podCreationTimestamp="2025-10-11 04:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:00:01.937999093 +0000 UTC m=+4412.871086737" watchObservedRunningTime="2025-10-11 04:00:01.940350042 +0000 UTC m=+4412.873437686" Oct 11 04:00:02 crc kubenswrapper[4953]: E1011 04:00:02.135064 4953 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode03d4745_93cb_4845_b4f8_b2ba75236d6d.slice/crio-1ade697a7b6b6676514fbb1c53dc77a2e729579562a5241ae3237301528256e6.scope\": RecentStats: unable to find data in memory cache]" Oct 11 04:00:02 crc kubenswrapper[4953]: I1011 04:00:02.931000 4953 generic.go:334] "Generic (PLEG): container finished" podID="e03d4745-93cb-4845-b4f8-b2ba75236d6d" containerID="1ade697a7b6b6676514fbb1c53dc77a2e729579562a5241ae3237301528256e6" exitCode=0 Oct 11 04:00:02 crc kubenswrapper[4953]: I1011 04:00:02.931142 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" event={"ID":"e03d4745-93cb-4845-b4f8-b2ba75236d6d","Type":"ContainerDied","Data":"1ade697a7b6b6676514fbb1c53dc77a2e729579562a5241ae3237301528256e6"} Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.412361 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.450889 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rkkj\" (UniqueName: \"kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj\") pod \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.450977 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume\") pod \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.451097 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume\") pod \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\" (UID: \"e03d4745-93cb-4845-b4f8-b2ba75236d6d\") " Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.452151 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume" (OuterVolumeSpecName: "config-volume") pod "e03d4745-93cb-4845-b4f8-b2ba75236d6d" (UID: "e03d4745-93cb-4845-b4f8-b2ba75236d6d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.459853 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e03d4745-93cb-4845-b4f8-b2ba75236d6d" (UID: "e03d4745-93cb-4845-b4f8-b2ba75236d6d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.459935 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj" (OuterVolumeSpecName: "kube-api-access-9rkkj") pod "e03d4745-93cb-4845-b4f8-b2ba75236d6d" (UID: "e03d4745-93cb-4845-b4f8-b2ba75236d6d"). InnerVolumeSpecName "kube-api-access-9rkkj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.552993 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e03d4745-93cb-4845-b4f8-b2ba75236d6d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.553038 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rkkj\" (UniqueName: \"kubernetes.io/projected/e03d4745-93cb-4845-b4f8-b2ba75236d6d-kube-api-access-9rkkj\") on node \"crc\" DevicePath \"\"" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.553048 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e03d4745-93cb-4845-b4f8-b2ba75236d6d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.952470 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" event={"ID":"e03d4745-93cb-4845-b4f8-b2ba75236d6d","Type":"ContainerDied","Data":"6f5fd8790215cddc443b8b2f4fb1a10da04f4f9fceea5408f5cfe782edbcfab6"} Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.952508 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335920-825wx" Oct 11 04:00:04 crc kubenswrapper[4953]: I1011 04:00:04.952518 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f5fd8790215cddc443b8b2f4fb1a10da04f4f9fceea5408f5cfe782edbcfab6" Oct 11 04:00:05 crc kubenswrapper[4953]: I1011 04:00:05.015355 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"] Oct 11 04:00:05 crc kubenswrapper[4953]: I1011 04:00:05.025923 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335875-gk84g"] Oct 11 04:00:05 crc kubenswrapper[4953]: I1011 04:00:05.808197 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50a20ae2-84b9-427c-a9be-419da263288a" path="/var/lib/kubelet/pods/50a20ae2-84b9-427c-a9be-419da263288a/volumes" Oct 11 04:00:11 crc kubenswrapper[4953]: I1011 04:00:11.795726 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:00:11 crc kubenswrapper[4953]: E1011 04:00:11.796745 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:00:22 crc kubenswrapper[4953]: I1011 04:00:22.795735 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:00:22 crc kubenswrapper[4953]: E1011 04:00:22.799044 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:00:35 crc kubenswrapper[4953]: I1011 04:00:35.795913 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:00:35 crc kubenswrapper[4953]: E1011 04:00:35.796619 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:00:49 crc kubenswrapper[4953]: I1011 04:00:49.802084 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:00:49 crc kubenswrapper[4953]: E1011 04:00:49.803896 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:00:53 crc kubenswrapper[4953]: I1011 04:00:53.258131 4953 scope.go:117] "RemoveContainer" containerID="d663d9b16b41ee640d64a358fc6a6bda62c863f6ab13ff1b7e9039822521cd56" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.152799 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29335921-stdzs"] Oct 11 04:01:00 crc kubenswrapper[4953]: E1011 04:01:00.153721 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e03d4745-93cb-4845-b4f8-b2ba75236d6d" containerName="collect-profiles" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.153739 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e03d4745-93cb-4845-b4f8-b2ba75236d6d" containerName="collect-profiles" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.153949 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e03d4745-93cb-4845-b4f8-b2ba75236d6d" containerName="collect-profiles" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.154654 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.169818 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.169925 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldwmp\" (UniqueName: \"kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.170008 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.170191 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.178858 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335921-stdzs"] Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.272156 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.272416 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.272472 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldwmp\" (UniqueName: \"kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.272510 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.655445 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.655498 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.655932 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldwmp\" (UniqueName: \"kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.659038 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data\") pod \"keystone-cron-29335921-stdzs\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:00 crc kubenswrapper[4953]: I1011 04:01:00.776802 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:01 crc kubenswrapper[4953]: I1011 04:01:01.239653 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29335921-stdzs"] Oct 11 04:01:01 crc kubenswrapper[4953]: I1011 04:01:01.536011 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335921-stdzs" event={"ID":"e81fc29c-6896-43b1-b77f-a03449849738","Type":"ContainerStarted","Data":"79b14fb651192f3d55e9f18de17646374e2bb81bbd614d7f07db2d5bca952918"} Oct 11 04:01:01 crc kubenswrapper[4953]: I1011 04:01:01.536295 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335921-stdzs" event={"ID":"e81fc29c-6896-43b1-b77f-a03449849738","Type":"ContainerStarted","Data":"0fd875613852b18d0fdf28d906b554ff9bd535cd0e8fbbaa187efd76f3cc2cd4"} Oct 11 04:01:01 crc kubenswrapper[4953]: I1011 04:01:01.563283 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29335921-stdzs" podStartSLOduration=1.563261855 podStartE2EDuration="1.563261855s" podCreationTimestamp="2025-10-11 04:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:01:01.550029712 +0000 UTC m=+4472.483117356" watchObservedRunningTime="2025-10-11 04:01:01.563261855 +0000 UTC m=+4472.496349509" Oct 11 04:01:01 crc kubenswrapper[4953]: I1011 04:01:01.795160 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:01:01 crc kubenswrapper[4953]: E1011 04:01:01.795507 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:01:04 crc kubenswrapper[4953]: I1011 04:01:04.558354 4953 generic.go:334] "Generic (PLEG): container finished" podID="e81fc29c-6896-43b1-b77f-a03449849738" containerID="79b14fb651192f3d55e9f18de17646374e2bb81bbd614d7f07db2d5bca952918" exitCode=0 Oct 11 04:01:04 crc kubenswrapper[4953]: I1011 04:01:04.558458 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335921-stdzs" event={"ID":"e81fc29c-6896-43b1-b77f-a03449849738","Type":"ContainerDied","Data":"79b14fb651192f3d55e9f18de17646374e2bb81bbd614d7f07db2d5bca952918"} Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.106366 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.218406 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldwmp\" (UniqueName: \"kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp\") pod \"e81fc29c-6896-43b1-b77f-a03449849738\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.218486 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle\") pod \"e81fc29c-6896-43b1-b77f-a03449849738\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.218634 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys\") pod \"e81fc29c-6896-43b1-b77f-a03449849738\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.218716 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data\") pod \"e81fc29c-6896-43b1-b77f-a03449849738\" (UID: \"e81fc29c-6896-43b1-b77f-a03449849738\") " Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.225874 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e81fc29c-6896-43b1-b77f-a03449849738" (UID: "e81fc29c-6896-43b1-b77f-a03449849738"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.243405 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp" (OuterVolumeSpecName: "kube-api-access-ldwmp") pod "e81fc29c-6896-43b1-b77f-a03449849738" (UID: "e81fc29c-6896-43b1-b77f-a03449849738"). InnerVolumeSpecName "kube-api-access-ldwmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.247859 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e81fc29c-6896-43b1-b77f-a03449849738" (UID: "e81fc29c-6896-43b1-b77f-a03449849738"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.293023 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data" (OuterVolumeSpecName: "config-data") pod "e81fc29c-6896-43b1-b77f-a03449849738" (UID: "e81fc29c-6896-43b1-b77f-a03449849738"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.322145 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldwmp\" (UniqueName: \"kubernetes.io/projected/e81fc29c-6896-43b1-b77f-a03449849738-kube-api-access-ldwmp\") on node \"crc\" DevicePath \"\"" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.322184 4953 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.322194 4953 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.322204 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81fc29c-6896-43b1-b77f-a03449849738-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.574311 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29335921-stdzs" event={"ID":"e81fc29c-6896-43b1-b77f-a03449849738","Type":"ContainerDied","Data":"0fd875613852b18d0fdf28d906b554ff9bd535cd0e8fbbaa187efd76f3cc2cd4"} Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.574351 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fd875613852b18d0fdf28d906b554ff9bd535cd0e8fbbaa187efd76f3cc2cd4" Oct 11 04:01:06 crc kubenswrapper[4953]: I1011 04:01:06.574426 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29335921-stdzs" Oct 11 04:01:16 crc kubenswrapper[4953]: I1011 04:01:16.795760 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:01:16 crc kubenswrapper[4953]: E1011 04:01:16.796652 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:01:30 crc kubenswrapper[4953]: I1011 04:01:30.795119 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:01:30 crc kubenswrapper[4953]: E1011 04:01:30.797433 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:01:44 crc kubenswrapper[4953]: I1011 04:01:44.795943 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:01:44 crc kubenswrapper[4953]: E1011 04:01:44.797031 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:01:57 crc kubenswrapper[4953]: I1011 04:01:57.796160 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:01:57 crc kubenswrapper[4953]: E1011 04:01:57.797332 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:02:08 crc kubenswrapper[4953]: I1011 04:02:08.795704 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:02:08 crc kubenswrapper[4953]: E1011 04:02:08.797138 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:02:22 crc kubenswrapper[4953]: I1011 04:02:22.796216 4953 scope.go:117] "RemoveContainer" 
containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:02:22 crc kubenswrapper[4953]: E1011 04:02:22.797514 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.412138 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:31 crc kubenswrapper[4953]: E1011 04:02:31.413265 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e81fc29c-6896-43b1-b77f-a03449849738" containerName="keystone-cron" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.413282 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="e81fc29c-6896-43b1-b77f-a03449849738" containerName="keystone-cron" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.413535 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="e81fc29c-6896-43b1-b77f-a03449849738" containerName="keystone-cron" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.416679 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.430875 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.606120 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.606198 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.606277 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt8j6\" (UniqueName: \"kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.708702 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.708781 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.708843 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt8j6\" (UniqueName: \"kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.709620 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.709620 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.750164 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt8j6\" (UniqueName: \"kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6\") pod \"community-operators-89xrq\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:31 crc kubenswrapper[4953]: I1011 04:02:31.753350 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:32 crc kubenswrapper[4953]: I1011 04:02:32.393768 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:32 crc kubenswrapper[4953]: W1011 04:02:32.400011 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53d9f3f8_aecd_41de_a547_66bd3214274b.slice/crio-13ccd229ce5c854d1adae827394d5a434cdff828252cde99995da9d4d9ba05ba WatchSource:0}: Error finding container 13ccd229ce5c854d1adae827394d5a434cdff828252cde99995da9d4d9ba05ba: Status 404 returned error can't find the container with id 13ccd229ce5c854d1adae827394d5a434cdff828252cde99995da9d4d9ba05ba Oct 11 04:02:32 crc kubenswrapper[4953]: I1011 04:02:32.463034 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerStarted","Data":"13ccd229ce5c854d1adae827394d5a434cdff828252cde99995da9d4d9ba05ba"} Oct 11 04:02:33 crc kubenswrapper[4953]: I1011 04:02:33.475565 4953 generic.go:334] "Generic (PLEG): container finished" podID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerID="9aabc982801aa34a3051666085185804d9b07be9ae6ab5a57bf9f592a8b2a71a" exitCode=0 Oct 11 04:02:33 crc kubenswrapper[4953]: I1011 04:02:33.475643 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerDied","Data":"9aabc982801aa34a3051666085185804d9b07be9ae6ab5a57bf9f592a8b2a71a"} Oct 11 04:02:35 crc kubenswrapper[4953]: I1011 04:02:35.497056 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerStarted","Data":"f5eb57fc84efb5b5a5c8a349e9f15e82ac20e0c6a991299d535588cbbb9f7178"} Oct 11 04:02:35 crc kubenswrapper[4953]: I1011 04:02:35.795799 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:02:35 crc kubenswrapper[4953]: E1011 04:02:35.796028 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:02:36 crc kubenswrapper[4953]: I1011 04:02:36.508401 4953 generic.go:334] "Generic (PLEG): container finished" podID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerID="f5eb57fc84efb5b5a5c8a349e9f15e82ac20e0c6a991299d535588cbbb9f7178" exitCode=0 Oct 11 04:02:36 crc kubenswrapper[4953]: I1011 04:02:36.508443 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerDied","Data":"f5eb57fc84efb5b5a5c8a349e9f15e82ac20e0c6a991299d535588cbbb9f7178"} Oct 11 04:02:37 crc kubenswrapper[4953]: I1011 04:02:37.518941 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" 
event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerStarted","Data":"284ea7156e5fc534820e22fc1291f1e400585727c47ec130c704fe4bfd9f7dcb"} Oct 11 04:02:37 crc kubenswrapper[4953]: I1011 04:02:37.540685 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-89xrq" podStartSLOduration=3.062851976 podStartE2EDuration="6.540661247s" podCreationTimestamp="2025-10-11 04:02:31 +0000 UTC" firstStartedPulling="2025-10-11 04:02:33.47864859 +0000 UTC m=+4564.411736234" lastFinishedPulling="2025-10-11 04:02:36.956457861 +0000 UTC m=+4567.889545505" observedRunningTime="2025-10-11 04:02:37.533823815 +0000 UTC m=+4568.466911469" watchObservedRunningTime="2025-10-11 04:02:37.540661247 +0000 UTC m=+4568.473748891" Oct 11 04:02:41 crc kubenswrapper[4953]: I1011 04:02:41.754195 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:41 crc kubenswrapper[4953]: I1011 04:02:41.754692 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:41 crc kubenswrapper[4953]: I1011 04:02:41.807276 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:42 crc kubenswrapper[4953]: I1011 04:02:42.614390 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:42 crc kubenswrapper[4953]: I1011 04:02:42.671449 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:44 crc kubenswrapper[4953]: I1011 04:02:44.578449 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-89xrq" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="registry-server" containerID="cri-o://284ea7156e5fc534820e22fc1291f1e400585727c47ec130c704fe4bfd9f7dcb" gracePeriod=2 Oct 11 04:02:45 crc kubenswrapper[4953]: I1011 04:02:45.592328 4953 generic.go:334] "Generic (PLEG): container finished" podID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerID="284ea7156e5fc534820e22fc1291f1e400585727c47ec130c704fe4bfd9f7dcb" exitCode=0 Oct 11 04:02:45 crc kubenswrapper[4953]: I1011 04:02:45.592427 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerDied","Data":"284ea7156e5fc534820e22fc1291f1e400585727c47ec130c704fe4bfd9f7dcb"} Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.009245 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.190960 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt8j6\" (UniqueName: \"kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6\") pod \"53d9f3f8-aecd-41de-a547-66bd3214274b\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.191506 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities\") pod \"53d9f3f8-aecd-41de-a547-66bd3214274b\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.192009 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content\") pod \"53d9f3f8-aecd-41de-a547-66bd3214274b\" (UID: \"53d9f3f8-aecd-41de-a547-66bd3214274b\") " Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.192850 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities" (OuterVolumeSpecName: "utilities") pod "53d9f3f8-aecd-41de-a547-66bd3214274b" (UID: "53d9f3f8-aecd-41de-a547-66bd3214274b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.193412 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.251511 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53d9f3f8-aecd-41de-a547-66bd3214274b" (UID: "53d9f3f8-aecd-41de-a547-66bd3214274b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.295143 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d9f3f8-aecd-41de-a547-66bd3214274b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.565995 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6" (OuterVolumeSpecName: "kube-api-access-lt8j6") pod "53d9f3f8-aecd-41de-a547-66bd3214274b" (UID: "53d9f3f8-aecd-41de-a547-66bd3214274b"). InnerVolumeSpecName "kube-api-access-lt8j6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.600346 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt8j6\" (UniqueName: \"kubernetes.io/projected/53d9f3f8-aecd-41de-a547-66bd3214274b-kube-api-access-lt8j6\") on node \"crc\" DevicePath \"\"" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.605743 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-89xrq" event={"ID":"53d9f3f8-aecd-41de-a547-66bd3214274b","Type":"ContainerDied","Data":"13ccd229ce5c854d1adae827394d5a434cdff828252cde99995da9d4d9ba05ba"} Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.605806 4953 scope.go:117] "RemoveContainer" containerID="284ea7156e5fc534820e22fc1291f1e400585727c47ec130c704fe4bfd9f7dcb" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.605914 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-89xrq" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.635282 4953 scope.go:117] "RemoveContainer" containerID="f5eb57fc84efb5b5a5c8a349e9f15e82ac20e0c6a991299d535588cbbb9f7178" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.640663 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.662545 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-89xrq"] Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.663136 4953 scope.go:117] "RemoveContainer" containerID="9aabc982801aa34a3051666085185804d9b07be9ae6ab5a57bf9f592a8b2a71a" Oct 11 04:02:46 crc kubenswrapper[4953]: I1011 04:02:46.795484 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:02:46 crc kubenswrapper[4953]: E1011 04:02:46.796065 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:02:47 crc kubenswrapper[4953]: I1011 04:02:47.805659 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" path="/var/lib/kubelet/pods/53d9f3f8-aecd-41de-a547-66bd3214274b/volumes" Oct 11 04:02:59 crc kubenswrapper[4953]: I1011 04:02:59.801383 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:02:59 crc kubenswrapper[4953]: E1011 04:02:59.802196 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:03:14 crc kubenswrapper[4953]: I1011 04:03:14.795540 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:03:15 crc kubenswrapper[4953]: 
I1011 04:03:15.881379 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85"} Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.447375 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:04:47 crc kubenswrapper[4953]: E1011 04:04:47.448345 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="extract-utilities" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.448357 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="extract-utilities" Oct 11 04:04:47 crc kubenswrapper[4953]: E1011 04:04:47.448375 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="registry-server" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.448382 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="registry-server" Oct 11 04:04:47 crc kubenswrapper[4953]: E1011 04:04:47.448403 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="extract-content" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.448409 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="extract-content" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.448638 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d9f3f8-aecd-41de-a547-66bd3214274b" containerName="registry-server" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.452147 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.477428 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.649843 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.650165 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l848z\" (UniqueName: \"kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.650301 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.751978 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.752040 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l848z\" (UniqueName: \"kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.752060 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.752583 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.752688 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:47 crc kubenswrapper[4953]: I1011 04:04:47.800647 4953 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l848z\" (UniqueName: \"kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z\") pod \"certified-operators-cxqhz\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.090030 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.373527 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.854082 4953 generic.go:334] "Generic (PLEG): container finished" podID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerID="c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34" exitCode=0 Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.854168 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerDied","Data":"c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34"} Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.854220 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerStarted","Data":"aa5872f5043ab2da6e59c4a1df5d0f9e5532e9027b8765cb41f6d1546f829565"} Oct 11 04:04:48 crc kubenswrapper[4953]: I1011 04:04:48.857877 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 04:04:49 crc kubenswrapper[4953]: I1011 04:04:49.875919 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerStarted","Data":"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579"} Oct 11 04:04:51 crc kubenswrapper[4953]: I1011 04:04:51.894335 4953 generic.go:334] "Generic (PLEG): container finished" podID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerID="c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579" exitCode=0 Oct 11 04:04:51 crc kubenswrapper[4953]: I1011 04:04:51.894393 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerDied","Data":"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579"} Oct 11 04:04:52 crc kubenswrapper[4953]: I1011 04:04:52.904612 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerStarted","Data":"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a"} Oct 11 04:04:53 crc kubenswrapper[4953]: I1011 04:04:53.429936 4953 scope.go:117] "RemoveContainer" containerID="22f1fb4e397a527ee717b9c1f406336550634afa9cd2383c663a68b25efdc26d" Oct 11 04:04:53 crc kubenswrapper[4953]: I1011 04:04:53.454746 4953 scope.go:117] "RemoveContainer" containerID="a6c000c2254d4d295200fa75a3ddccdbcef43b1853f9fdc4cf056ec3b52b1913" Oct 11 04:04:53 crc kubenswrapper[4953]: I1011 04:04:53.534904 4953 scope.go:117] "RemoveContainer" containerID="9966e8616500cf424aa4f5a26df6c0d3e6c484a5061cebc61fa3227b76fe5da4" Oct 11 04:04:58 crc kubenswrapper[4953]: I1011 
04:04:58.090850 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:58 crc kubenswrapper[4953]: I1011 04:04:58.091381 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:58 crc kubenswrapper[4953]: I1011 04:04:58.158483 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:58 crc kubenswrapper[4953]: I1011 04:04:58.181180 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cxqhz" podStartSLOduration=7.6976629370000005 podStartE2EDuration="11.18115336s" podCreationTimestamp="2025-10-11 04:04:47 +0000 UTC" firstStartedPulling="2025-10-11 04:04:48.857682083 +0000 UTC m=+4699.790769727" lastFinishedPulling="2025-10-11 04:04:52.341172506 +0000 UTC m=+4703.274260150" observedRunningTime="2025-10-11 04:04:52.93599488 +0000 UTC m=+4703.869082534" watchObservedRunningTime="2025-10-11 04:04:58.18115336 +0000 UTC m=+4709.114241004" Oct 11 04:04:59 crc kubenswrapper[4953]: I1011 04:04:59.377800 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:04:59 crc kubenswrapper[4953]: I1011 04:04:59.458289 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:05:00 crc kubenswrapper[4953]: I1011 04:05:00.980927 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cxqhz" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="registry-server" containerID="cri-o://a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a" gracePeriod=2 Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.626626 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.674914 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l848z\" (UniqueName: \"kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z\") pod \"a3142a17-6a08-47b5-b189-83f1bebf2508\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.675026 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities\") pod \"a3142a17-6a08-47b5-b189-83f1bebf2508\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.675322 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content\") pod \"a3142a17-6a08-47b5-b189-83f1bebf2508\" (UID: \"a3142a17-6a08-47b5-b189-83f1bebf2508\") " Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.675913 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities" (OuterVolumeSpecName: "utilities") pod "a3142a17-6a08-47b5-b189-83f1bebf2508" (UID: "a3142a17-6a08-47b5-b189-83f1bebf2508"). InnerVolumeSpecName "utilities". 
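The startup-latency record above ties out numerically: podStartE2EDuration (11.18115336s) is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration (7.697662937s) is that same span minus the image-pull window (lastFinishedPulling - firstStartedPulling, about 3.483s). A minimal Go sketch reproducing the two figures from the timestamps in the record (an illustration of the arithmetic only, not kubelet source):

    package main

    import (
    	"fmt"
    	"time"
    )

    // Go permits fractional seconds in the input even when the layout omits them.
    const layout = "2006-01-02 15:04:05 -0700 MST"

    func mustParse(s string) time.Time {
    	t, err := time.Parse(layout, s)
    	if err != nil {
    		panic(err)
    	}
    	return t
    }

    func main() {
    	created := mustParse("2025-10-11 04:04:47 +0000 UTC")            // podCreationTimestamp
    	observed := mustParse("2025-10-11 04:04:58.18115336 +0000 UTC")  // watchObservedRunningTime
    	firstPull := mustParse("2025-10-11 04:04:48.857682083 +0000 UTC") // firstStartedPulling
    	lastPull := mustParse("2025-10-11 04:04:52.341172506 +0000 UTC")  // lastFinishedPulling

    	e2e := observed.Sub(created)         // 11.18115336s  -> podStartE2EDuration
    	slo := e2e - lastPull.Sub(firstPull) // 7.697662937s  -> podStartSLOduration
    	fmt.Println(e2e, slo)
    }

The SLO figure, in other words, excludes time spent pulling images, so slow registry pulls do not count against the startup objective.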
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.702866 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z" (OuterVolumeSpecName: "kube-api-access-l848z") pod "a3142a17-6a08-47b5-b189-83f1bebf2508" (UID: "a3142a17-6a08-47b5-b189-83f1bebf2508"). InnerVolumeSpecName "kube-api-access-l848z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.734188 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3142a17-6a08-47b5-b189-83f1bebf2508" (UID: "a3142a17-6a08-47b5-b189-83f1bebf2508"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.777479 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.777788 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l848z\" (UniqueName: \"kubernetes.io/projected/a3142a17-6a08-47b5-b189-83f1bebf2508-kube-api-access-l848z\") on node \"crc\" DevicePath \"\"" Oct 11 04:05:01 crc kubenswrapper[4953]: I1011 04:05:01.777927 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3142a17-6a08-47b5-b189-83f1bebf2508-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.041518 4953 generic.go:334] "Generic (PLEG): container finished" podID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerID="a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a" exitCode=0 Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.041562 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerDied","Data":"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a"} Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.041587 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxqhz" event={"ID":"a3142a17-6a08-47b5-b189-83f1bebf2508","Type":"ContainerDied","Data":"aa5872f5043ab2da6e59c4a1df5d0f9e5532e9027b8765cb41f6d1546f829565"} Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.041625 4953 scope.go:117] "RemoveContainer" containerID="a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.041770 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxqhz" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.116937 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.127693 4953 scope.go:117] "RemoveContainer" containerID="c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.149201 4953 scope.go:117] "RemoveContainer" containerID="c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.151763 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cxqhz"] Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.198660 4953 scope.go:117] "RemoveContainer" containerID="a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a" Oct 11 04:05:02 crc kubenswrapper[4953]: E1011 04:05:02.199178 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a\": container with ID starting with a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a not found: ID does not exist" containerID="a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.199208 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a"} err="failed to get container status \"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a\": rpc error: code = NotFound desc = could not find container \"a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a\": container with ID starting with a8237fabf2d4b0fb4bb771b81448516bf1f2f684c393cf8b154e63fc887e316a not found: ID does not exist" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.199229 4953 scope.go:117] "RemoveContainer" containerID="c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579" Oct 11 04:05:02 crc kubenswrapper[4953]: E1011 04:05:02.199843 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579\": container with ID starting with c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579 not found: ID does not exist" containerID="c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.199892 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579"} err="failed to get container status \"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579\": rpc error: code = NotFound desc = could not find container \"c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579\": container with ID starting with c539fe2e0367d46311eb6a7e3b5b23757e54770ed91cbd55614ee19416e3c579 not found: ID does not exist" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.199921 4953 scope.go:117] "RemoveContainer" containerID="c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34" Oct 11 04:05:02 crc kubenswrapper[4953]: E1011 04:05:02.200341 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34\": container with ID starting with c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34 not found: ID does not exist" containerID="c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34" Oct 11 04:05:02 crc kubenswrapper[4953]: I1011 04:05:02.200405 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34"} err="failed to get container status \"c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34\": rpc error: code = NotFound desc = could not find container \"c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34\": container with ID starting with c9e27502d7ba8383a82be52e2a6832f05d09b8b913ad976e988d501a12378e34 not found: ID does not exist" Oct 11 04:05:03 crc kubenswrapper[4953]: I1011 04:05:03.806841 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" path="/var/lib/kubelet/pods/a3142a17-6a08-47b5-b189-83f1bebf2508/volumes" Oct 11 04:05:41 crc kubenswrapper[4953]: I1011 04:05:41.317046 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:05:41 crc kubenswrapper[4953]: I1011 04:05:41.317699 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:06:11 crc kubenswrapper[4953]: I1011 04:06:11.316517 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:06:11 crc kubenswrapper[4953]: I1011 04:06:11.318271 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.903014 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:22 crc kubenswrapper[4953]: E1011 04:06:22.903931 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="extract-utilities" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.903947 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="extract-utilities" Oct 11 04:06:22 crc kubenswrapper[4953]: E1011 04:06:22.903963 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="registry-server" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.903969 4953 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="registry-server" Oct 11 04:06:22 crc kubenswrapper[4953]: E1011 04:06:22.903999 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="extract-content" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.904006 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="extract-content" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.904180 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3142a17-6a08-47b5-b189-83f1bebf2508" containerName="registry-server" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.905719 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.917896 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.998658 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.998703 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:22 crc kubenswrapper[4953]: I1011 04:06:22.998816 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g76k4\" (UniqueName: \"kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.100710 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g76k4\" (UniqueName: \"kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.100816 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.100838 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.101325 
4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.101428 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.121535 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g76k4\" (UniqueName: \"kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4\") pod \"redhat-marketplace-4t2pb\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.222152 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.696496 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:23 crc kubenswrapper[4953]: W1011 04:06:23.701907 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d6058e1_e1e9_412f_afbb_d7bf99a7d032.slice/crio-91a1105e427a097c9da02272a5a2938cb3cf3d27115dba96e49ff59e204167c1 WatchSource:0}: Error finding container 91a1105e427a097c9da02272a5a2938cb3cf3d27115dba96e49ff59e204167c1: Status 404 returned error can't find the container with id 91a1105e427a097c9da02272a5a2938cb3cf3d27115dba96e49ff59e204167c1 Oct 11 04:06:23 crc kubenswrapper[4953]: I1011 04:06:23.873117 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerStarted","Data":"91a1105e427a097c9da02272a5a2938cb3cf3d27115dba96e49ff59e204167c1"} Oct 11 04:06:24 crc kubenswrapper[4953]: I1011 04:06:24.883778 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerDied","Data":"e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b"} Oct 11 04:06:24 crc kubenswrapper[4953]: I1011 04:06:24.883554 4953 generic.go:334] "Generic (PLEG): container finished" podID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerID="e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b" exitCode=0 Oct 11 04:06:25 crc kubenswrapper[4953]: I1011 04:06:25.896999 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerStarted","Data":"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e"} Oct 11 04:06:26 crc kubenswrapper[4953]: I1011 04:06:26.907156 4953 generic.go:334] "Generic (PLEG): container finished" podID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerID="d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e" exitCode=0 Oct 11 04:06:26 crc kubenswrapper[4953]: I1011 04:06:26.907528 4953 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerDied","Data":"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e"} Oct 11 04:06:27 crc kubenswrapper[4953]: I1011 04:06:27.918479 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerStarted","Data":"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6"} Oct 11 04:06:27 crc kubenswrapper[4953]: I1011 04:06:27.951265 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4t2pb" podStartSLOduration=3.522974117 podStartE2EDuration="5.951242709s" podCreationTimestamp="2025-10-11 04:06:22 +0000 UTC" firstStartedPulling="2025-10-11 04:06:24.887035302 +0000 UTC m=+4795.820122946" lastFinishedPulling="2025-10-11 04:06:27.315303894 +0000 UTC m=+4798.248391538" observedRunningTime="2025-10-11 04:06:27.940076339 +0000 UTC m=+4798.873163993" watchObservedRunningTime="2025-10-11 04:06:27.951242709 +0000 UTC m=+4798.884330373" Oct 11 04:06:33 crc kubenswrapper[4953]: I1011 04:06:33.222901 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:33 crc kubenswrapper[4953]: I1011 04:06:33.223647 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:33 crc kubenswrapper[4953]: I1011 04:06:33.305657 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:34 crc kubenswrapper[4953]: I1011 04:06:34.030591 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:34 crc kubenswrapper[4953]: I1011 04:06:34.103407 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:35 crc kubenswrapper[4953]: I1011 04:06:35.990207 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4t2pb" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="registry-server" containerID="cri-o://9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6" gracePeriod=2 Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.505110 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.622380 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities\") pod \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.622504 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g76k4\" (UniqueName: \"kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4\") pod \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.622661 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content\") pod \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\" (UID: \"7d6058e1-e1e9-412f-afbb-d7bf99a7d032\") " Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.623512 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities" (OuterVolumeSpecName: "utilities") pod "7d6058e1-e1e9-412f-afbb-d7bf99a7d032" (UID: "7d6058e1-e1e9-412f-afbb-d7bf99a7d032"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.631346 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4" (OuterVolumeSpecName: "kube-api-access-g76k4") pod "7d6058e1-e1e9-412f-afbb-d7bf99a7d032" (UID: "7d6058e1-e1e9-412f-afbb-d7bf99a7d032"). InnerVolumeSpecName "kube-api-access-g76k4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.635988 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d6058e1-e1e9-412f-afbb-d7bf99a7d032" (UID: "7d6058e1-e1e9-412f-afbb-d7bf99a7d032"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.725799 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.725837 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g76k4\" (UniqueName: \"kubernetes.io/projected/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-kube-api-access-g76k4\") on node \"crc\" DevicePath \"\"" Oct 11 04:06:36 crc kubenswrapper[4953]: I1011 04:06:36.725851 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d6058e1-e1e9-412f-afbb-d7bf99a7d032-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.001254 4953 generic.go:334] "Generic (PLEG): container finished" podID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerID="9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6" exitCode=0 Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.001305 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerDied","Data":"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6"} Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.001336 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4t2pb" event={"ID":"7d6058e1-e1e9-412f-afbb-d7bf99a7d032","Type":"ContainerDied","Data":"91a1105e427a097c9da02272a5a2938cb3cf3d27115dba96e49ff59e204167c1"} Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.001401 4953 scope.go:117] "RemoveContainer" containerID="9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.001558 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4t2pb" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.029959 4953 scope.go:117] "RemoveContainer" containerID="d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.055172 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.064125 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4t2pb"] Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.075647 4953 scope.go:117] "RemoveContainer" containerID="e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.104632 4953 scope.go:117] "RemoveContainer" containerID="9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6" Oct 11 04:06:37 crc kubenswrapper[4953]: E1011 04:06:37.106112 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6\": container with ID starting with 9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6 not found: ID does not exist" containerID="9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.106142 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6"} err="failed to get container status \"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6\": rpc error: code = NotFound desc = could not find container \"9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6\": container with ID starting with 9c6c8f935ac2e12856851de4f5ac5d4d3c3fd7138346facdfa14a6e5afa31cc6 not found: ID does not exist" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.106169 4953 scope.go:117] "RemoveContainer" containerID="d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e" Oct 11 04:06:37 crc kubenswrapper[4953]: E1011 04:06:37.106561 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e\": container with ID starting with d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e not found: ID does not exist" containerID="d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.106582 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e"} err="failed to get container status \"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e\": rpc error: code = NotFound desc = could not find container \"d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e\": container with ID starting with d59a776cec3f8bdafca078d8f2893c2ae5c96ec810c7df415c442fbb5a4f364e not found: ID does not exist" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.106597 4953 scope.go:117] "RemoveContainer" containerID="e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b" Oct 11 04:06:37 crc kubenswrapper[4953]: E1011 04:06:37.106851 4953 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b\": container with ID starting with e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b not found: ID does not exist" containerID="e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.106874 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b"} err="failed to get container status \"e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b\": rpc error: code = NotFound desc = could not find container \"e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b\": container with ID starting with e0e8c31807b8c445e1bf66371e132b12f6d2358439583bbdaaefeab25fc87b4b not found: ID does not exist" Oct 11 04:06:37 crc kubenswrapper[4953]: I1011 04:06:37.810818 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" path="/var/lib/kubelet/pods/7d6058e1-e1e9-412f-afbb-d7bf99a7d032/volumes" Oct 11 04:06:41 crc kubenswrapper[4953]: I1011 04:06:41.316537 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:06:41 crc kubenswrapper[4953]: I1011 04:06:41.317176 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:06:41 crc kubenswrapper[4953]: I1011 04:06:41.317229 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" Oct 11 04:06:41 crc kubenswrapper[4953]: I1011 04:06:41.317968 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 11 04:06:41 crc kubenswrapper[4953]: I1011 04:06:41.318026 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85" gracePeriod=600 Oct 11 04:06:42 crc kubenswrapper[4953]: I1011 04:06:42.047510 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85" exitCode=0 Oct 11 04:06:42 crc kubenswrapper[4953]: I1011 04:06:42.048318 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85"} Oct 11 04:06:42 crc kubenswrapper[4953]: I1011 04:06:42.048361 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"} Oct 11 04:06:42 crc kubenswrapper[4953]: I1011 04:06:42.048387 4953 scope.go:117] "RemoveContainer" containerID="e7abcd399120d811a93a81348db61bb9c6b2ca8cad4ea5e784047398a48577be" Oct 11 04:08:41 crc kubenswrapper[4953]: I1011 04:08:41.317653 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 11 04:08:41 crc kubenswrapper[4953]: I1011 04:08:41.318257 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 11 04:08:48 crc kubenswrapper[4953]: I1011 04:08:48.418753 4953 generic.go:334] "Generic (PLEG): container finished" podID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" containerID="008487a3d791f2307b2d9d887959598c05221119bb75e50172953295fb77a357" exitCode=1 Oct 11 04:08:48 crc kubenswrapper[4953]: I1011 04:08:48.418832 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79","Type":"ContainerDied","Data":"008487a3d791f2307b2d9d887959598c05221119bb75e50172953295fb77a357"} Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.836761 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884344 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884445 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884586 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884683 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884699 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884780 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884804 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884847 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gq2q4\" (UniqueName: \"kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.884869 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs\") pod \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\" (UID: \"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79\") " Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.885443 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data" (OuterVolumeSpecName: "config-data") pod 
"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.887187 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.893082 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.903451 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "test-operator-logs") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.903855 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4" (OuterVolumeSpecName: "kube-api-access-gq2q4") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "kube-api-access-gq2q4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.929824 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.931089 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.933073 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.962112 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" (UID: "7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.987729 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gq2q4\" (UniqueName: \"kubernetes.io/projected/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-kube-api-access-gq2q4\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.987792 4953 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.987805 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.987818 4953 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.987831 4953 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.988292 4953 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-config-data\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.988318 4953 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.988334 4953 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:49 crc kubenswrapper[4953]: I1011 04:08:49.988394 4953 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 11 04:08:50 crc kubenswrapper[4953]: I1011 04:08:50.022844 4953 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 11 04:08:50 crc kubenswrapper[4953]: I1011 04:08:50.090388 4953 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 11 04:08:50 crc kubenswrapper[4953]: I1011 04:08:50.440776 4953 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79","Type":"ContainerDied","Data":"ef91a5ee40aa7a7a9afcbadc7780474f0598530c106eb7974ecc7ee7cf06fd21"} Oct 11 04:08:50 crc kubenswrapper[4953]: I1011 04:08:50.440825 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef91a5ee40aa7a7a9afcbadc7780474f0598530c106eb7974ecc7ee7cf06fd21" Oct 11 04:08:50 crc kubenswrapper[4953]: I1011 04:08:50.440856 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.531592 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 11 04:08:58 crc kubenswrapper[4953]: E1011 04:08:58.532686 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" containerName="tempest-tests-tempest-tests-runner" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.532701 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" containerName="tempest-tests-tempest-tests-runner" Oct 11 04:08:58 crc kubenswrapper[4953]: E1011 04:08:58.532723 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="extract-utilities" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.532731 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="extract-utilities" Oct 11 04:08:58 crc kubenswrapper[4953]: E1011 04:08:58.532749 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="registry-server" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.532758 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="registry-server" Oct 11 04:08:58 crc kubenswrapper[4953]: E1011 04:08:58.532780 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="extract-content" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.532788 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="extract-content" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.533051 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79" containerName="tempest-tests-tempest-tests-runner" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.533075 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6058e1-e1e9-412f-afbb-d7bf99a7d032" containerName="registry-server" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.534033 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.547327 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-tgzx8" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.551562 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.573535 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9snht\" (UniqueName: \"kubernetes.io/projected/821b6cab-651b-4ae4-a477-35ddb4641b8b-kube-api-access-9snht\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.573597 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.676076 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9snht\" (UniqueName: \"kubernetes.io/projected/821b6cab-651b-4ae4-a477-35ddb4641b8b-kube-api-access-9snht\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.676184 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.676881 4953 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.696739 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9snht\" (UniqueName: \"kubernetes.io/projected/821b6cab-651b-4ae4-a477-35ddb4641b8b-kube-api-access-9snht\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc kubenswrapper[4953]: I1011 04:08:58.719241 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"821b6cab-651b-4ae4-a477-35ddb4641b8b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 11 04:08:58 crc 
kubenswrapper[4953]: I1011 04:08:58.870035 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Oct 11 04:08:59 crc kubenswrapper[4953]: I1011 04:08:59.387282 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Oct 11 04:08:59 crc kubenswrapper[4953]: I1011 04:08:59.568976 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"821b6cab-651b-4ae4-a477-35ddb4641b8b","Type":"ContainerStarted","Data":"088b5a7fa8274677dd4fda4d6a08599f6db7a5c528708fbf0c3298485df71f3a"}
Oct 11 04:09:01 crc kubenswrapper[4953]: I1011 04:09:01.591254 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"821b6cab-651b-4ae4-a477-35ddb4641b8b","Type":"ContainerStarted","Data":"762d0c4c4cd7a438cbcd5af09e7fdc0812a1c447910fb61dd7c5921145af1a06"}
Oct 11 04:09:01 crc kubenswrapper[4953]: I1011 04:09:01.612597 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.529069731 podStartE2EDuration="3.612575251s" podCreationTimestamp="2025-10-11 04:08:58 +0000 UTC" firstStartedPulling="2025-10-11 04:08:59.395226005 +0000 UTC m=+4950.328313649" lastFinishedPulling="2025-10-11 04:09:00.478731525 +0000 UTC m=+4951.411819169" observedRunningTime="2025-10-11 04:09:01.605992346 +0000 UTC m=+4952.539080000" watchObservedRunningTime="2025-10-11 04:09:01.612575251 +0000 UTC m=+4952.545662885"
Oct 11 04:09:11 crc kubenswrapper[4953]: I1011 04:09:11.316803 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 04:09:11 crc kubenswrapper[4953]: I1011 04:09:11.317465 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.555688 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sp9vl/must-gather-7ts64"]
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.557747 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.559527 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-sp9vl"/"default-dockercfg-5t7xs"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.559786 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-sp9vl"/"kube-root-ca.crt"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.569915 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-sp9vl/must-gather-7ts64"]
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.573395 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-sp9vl"/"openshift-service-ca.crt"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.674347 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.674510 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfgmn\" (UniqueName: \"kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.776214 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.776304 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfgmn\" (UniqueName: \"kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.776937 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.796904 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfgmn\" (UniqueName: \"kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn\") pod \"must-gather-7ts64\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:33 crc kubenswrapper[4953]: I1011 04:09:33.895862 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/must-gather-7ts64"
Oct 11 04:09:34 crc kubenswrapper[4953]: I1011 04:09:34.422757 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-sp9vl/must-gather-7ts64"]
Oct 11 04:09:35 crc kubenswrapper[4953]: I1011 04:09:35.050424 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/must-gather-7ts64" event={"ID":"ca5022b0-d6c5-4b55-919c-d70e623702cc","Type":"ContainerStarted","Data":"db1fe43d024f0d9058e8b29128938f8ca62eba62d8af0108353c9d89785fadbf"}
Oct 11 04:09:41 crc kubenswrapper[4953]: I1011 04:09:41.316822 4953 patch_prober.go:28] interesting pod/machine-config-daemon-9jz9g container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 11 04:09:41 crc kubenswrapper[4953]: I1011 04:09:41.317269 4953 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 11 04:09:41 crc kubenswrapper[4953]: I1011 04:09:41.317315 4953 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g"
Oct 11 04:09:41 crc kubenswrapper[4953]: I1011 04:09:41.318110 4953 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"} pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 11 04:09:41 crc kubenswrapper[4953]: I1011 04:09:41.318157 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerName="machine-config-daemon" containerID="cri-o://f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" gracePeriod=600
Oct 11 04:09:41 crc kubenswrapper[4953]: E1011 04:09:41.442816 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.124462 4953 generic.go:334] "Generic (PLEG): container finished" podID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" exitCode=0
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.125089 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerDied","Data":"f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"}
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.125171 4953 scope.go:117] "RemoveContainer" containerID="7a9199e900be11c3b033735100eabbd40478f0f5f3da4ada66080b2a744c1d85"
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.125839 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:09:42 crc kubenswrapper[4953]: E1011 04:09:42.126096 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.128928 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/must-gather-7ts64" event={"ID":"ca5022b0-d6c5-4b55-919c-d70e623702cc","Type":"ContainerStarted","Data":"a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee"}
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.128967 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/must-gather-7ts64" event={"ID":"ca5022b0-d6c5-4b55-919c-d70e623702cc","Type":"ContainerStarted","Data":"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219"}
Oct 11 04:09:42 crc kubenswrapper[4953]: I1011 04:09:42.176944 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-sp9vl/must-gather-7ts64" podStartSLOduration=3.311715499 podStartE2EDuration="9.176916474s" podCreationTimestamp="2025-10-11 04:09:33 +0000 UTC" firstStartedPulling="2025-10-11 04:09:34.443443306 +0000 UTC m=+4985.376530960" lastFinishedPulling="2025-10-11 04:09:40.308644291 +0000 UTC m=+4991.241731935" observedRunningTime="2025-10-11 04:09:42.16120662 +0000 UTC m=+4993.094294284" watchObservedRunningTime="2025-10-11 04:09:42.176916474 +0000 UTC m=+4993.110004148"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.128683 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.132139 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.143145 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.197892 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.197995 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.198065 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rgc4\" (UniqueName: \"kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.299587 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.299693 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.299747 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rgc4\" (UniqueName: \"kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.300117 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.300345 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.331918 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rgc4\" (UniqueName: \"kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4\") pod \"redhat-operators-jx88w\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") " pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.457033 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:09:44 crc kubenswrapper[4953]: I1011 04:09:44.744485 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:09:45 crc kubenswrapper[4953]: I1011 04:09:45.173870 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerStarted","Data":"46ce6fc1313cdde4a321929cf35f55b49aefb7c3d4d75c59d07c01c5167ba733"}
Oct 11 04:09:46 crc kubenswrapper[4953]: I1011 04:09:46.195440 4953 generic.go:334] "Generic (PLEG): container finished" podID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerID="91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d" exitCode=0
Oct 11 04:09:46 crc kubenswrapper[4953]: I1011 04:09:46.195864 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerDied","Data":"91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d"}
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.214473 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-stnj5"]
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.217629 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.358672 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.358950 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5llv\" (UniqueName: \"kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.460337 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.460687 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5llv\" (UniqueName: \"kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.460506 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.491596 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5llv\" (UniqueName: \"kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv\") pod \"crc-debug-stnj5\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") " pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: I1011 04:09:47.540725 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:09:47 crc kubenswrapper[4953]: W1011 04:09:47.576774 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c59fd10_df3c_4c55_b274_858897ec7d93.slice/crio-eb13e1b8b6076ca8ce4c763e28a011da7dc48fc362c6bdb3dc8bc815f2a74a92 WatchSource:0}: Error finding container eb13e1b8b6076ca8ce4c763e28a011da7dc48fc362c6bdb3dc8bc815f2a74a92: Status 404 returned error can't find the container with id eb13e1b8b6076ca8ce4c763e28a011da7dc48fc362c6bdb3dc8bc815f2a74a92
Oct 11 04:09:48 crc kubenswrapper[4953]: I1011 04:09:48.214044 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-stnj5" event={"ID":"9c59fd10-df3c-4c55-b274-858897ec7d93","Type":"ContainerStarted","Data":"eb13e1b8b6076ca8ce4c763e28a011da7dc48fc362c6bdb3dc8bc815f2a74a92"}
Oct 11 04:09:49 crc kubenswrapper[4953]: I1011 04:09:49.224935 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerStarted","Data":"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"}
Oct 11 04:09:52 crc kubenswrapper[4953]: I1011 04:09:52.250322 4953 generic.go:334] "Generic (PLEG): container finished" podID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerID="0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c" exitCode=0
Oct 11 04:09:52 crc kubenswrapper[4953]: I1011 04:09:52.250379 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerDied","Data":"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"}
Oct 11 04:09:54 crc kubenswrapper[4953]: I1011 04:09:54.794827 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:09:54 crc kubenswrapper[4953]: E1011 04:09:54.795689 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:09:58 crc kubenswrapper[4953]: I1011 04:09:58.019837 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 11 04:09:58 crc kubenswrapper[4953]: I1011 04:09:58.315884 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-stnj5" event={"ID":"9c59fd10-df3c-4c55-b274-858897ec7d93","Type":"ContainerStarted","Data":"75b9552e9ae46c110c62d3ef4731328e09508f46f8bfed43a3c45a761c8dbece"}
Oct 11 04:09:58 crc kubenswrapper[4953]: I1011 04:09:58.332656 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-sp9vl/crc-debug-stnj5" podStartSLOduration=0.814514448 podStartE2EDuration="11.332639857s" podCreationTimestamp="2025-10-11 04:09:47 +0000 UTC" firstStartedPulling="2025-10-11 04:09:47.579407325 +0000 UTC m=+4998.512494979" lastFinishedPulling="2025-10-11 04:09:58.097532744 +0000 UTC m=+5009.030620388" observedRunningTime="2025-10-11 04:09:58.327938929 +0000 UTC m=+5009.261026573" watchObservedRunningTime="2025-10-11 04:09:58.332639857 +0000 UTC m=+5009.265727501"
Oct 11 04:09:59 crc kubenswrapper[4953]: I1011 04:09:59.326383 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerStarted","Data":"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"}
Oct 11 04:09:59 crc kubenswrapper[4953]: I1011 04:09:59.346713 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jx88w" podStartSLOduration=3.122642588 podStartE2EDuration="15.346694605s" podCreationTimestamp="2025-10-11 04:09:44 +0000 UTC" firstStartedPulling="2025-10-11 04:09:46.211083003 +0000 UTC m=+4997.144170647" lastFinishedPulling="2025-10-11 04:09:58.43513502 +0000 UTC m=+5009.368222664" observedRunningTime="2025-10-11 04:09:59.341788672 +0000 UTC m=+5010.274876316" watchObservedRunningTime="2025-10-11 04:09:59.346694605 +0000 UTC m=+5010.279782239"
Oct 11 04:10:04 crc kubenswrapper[4953]: I1011 04:10:04.457276 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:04 crc kubenswrapper[4953]: I1011 04:10:04.457956 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:04 crc kubenswrapper[4953]: I1011 04:10:04.510125 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:05 crc kubenswrapper[4953]: I1011 04:10:05.588278 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:05 crc kubenswrapper[4953]: I1011 04:10:05.647170 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:10:07 crc kubenswrapper[4953]: I1011 04:10:07.391267 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jx88w" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="registry-server" containerID="cri-o://5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524" gracePeriod=2
Oct 11 04:10:07 crc kubenswrapper[4953]: I1011 04:10:07.796407 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:10:07 crc kubenswrapper[4953]: E1011 04:10:07.813527 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.066850 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.250588 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rgc4\" (UniqueName: \"kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4\") pod \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") "
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.250753 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities\") pod \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") "
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.250876 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content\") pod \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\" (UID: \"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57\") "
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.251569 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities" (OuterVolumeSpecName: "utilities") pod "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" (UID: "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.263870 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4" (OuterVolumeSpecName: "kube-api-access-7rgc4") pod "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" (UID: "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57"). InnerVolumeSpecName "kube-api-access-7rgc4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.340754 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" (UID: "a7cb842f-2694-4c50-b8cb-ac3c71c9fa57"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.352836 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.352871 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rgc4\" (UniqueName: \"kubernetes.io/projected/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-kube-api-access-7rgc4\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.352884 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57-utilities\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.400158 4953 generic.go:334] "Generic (PLEG): container finished" podID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerID="5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524" exitCode=0
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.400204 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerDied","Data":"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"}
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.400227 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jx88w" event={"ID":"a7cb842f-2694-4c50-b8cb-ac3c71c9fa57","Type":"ContainerDied","Data":"46ce6fc1313cdde4a321929cf35f55b49aefb7c3d4d75c59d07c01c5167ba733"}
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.400245 4953 scope.go:117] "RemoveContainer" containerID="5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"
Oct 11 04:10:08 crc kubenswrapper[4953]: I1011 04:10:08.400352 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jx88w"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.239065 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.242197 4953 scope.go:117] "RemoveContainer" containerID="0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.247010 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jx88w"]
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.277790 4953 scope.go:117] "RemoveContainer" containerID="91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.332980 4953 scope.go:117] "RemoveContainer" containerID="5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"
Oct 11 04:10:10 crc kubenswrapper[4953]: E1011 04:10:10.333545 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524\": container with ID starting with 5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524 not found: ID does not exist" containerID="5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.333588 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524"} err="failed to get container status \"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524\": rpc error: code = NotFound desc = could not find container \"5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524\": container with ID starting with 5f227d51f638e083766d75ef4945862dfcc1f029f42633d2b73b709c4a859524 not found: ID does not exist"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.333653 4953 scope.go:117] "RemoveContainer" containerID="0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"
Oct 11 04:10:10 crc kubenswrapper[4953]: E1011 04:10:10.334028 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c\": container with ID starting with 0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c not found: ID does not exist" containerID="0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.334052 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c"} err="failed to get container status \"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c\": rpc error: code = NotFound desc = could not find container \"0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c\": container with ID starting with 0897c7651f2aae5373f1d5d36266b4dae300a84170d2daf71df66271eba5970c not found: ID does not exist"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.334070 4953 scope.go:117] "RemoveContainer" containerID="91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d"
Oct 11 04:10:10 crc kubenswrapper[4953]: E1011 04:10:10.334300 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d\": container with ID starting with 91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d not found: ID does not exist" containerID="91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d"
Oct 11 04:10:10 crc kubenswrapper[4953]: I1011 04:10:10.334328 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d"} err="failed to get container status \"91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d\": rpc error: code = NotFound desc = could not find container \"91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d\": container with ID starting with 91a3c06d33ea830d2cbb3390db48d476f8e32d3b19cb01c3adf2e4fe9a08c91d not found: ID does not exist"
Oct 11 04:10:11 crc kubenswrapper[4953]: I1011 04:10:11.810484 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" path="/var/lib/kubelet/pods/a7cb842f-2694-4c50-b8cb-ac3c71c9fa57/volumes"
Oct 11 04:10:19 crc kubenswrapper[4953]: I1011 04:10:19.805325 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:10:19 crc kubenswrapper[4953]: E1011 04:10:19.806169 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:10:34 crc kubenswrapper[4953]: I1011 04:10:34.795486 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:10:34 crc kubenswrapper[4953]: E1011 04:10:34.796323 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:10:41 crc kubenswrapper[4953]: I1011 04:10:41.703243 4953 generic.go:334] "Generic (PLEG): container finished" podID="9c59fd10-df3c-4c55-b274-858897ec7d93" containerID="75b9552e9ae46c110c62d3ef4731328e09508f46f8bfed43a3c45a761c8dbece" exitCode=0
Oct 11 04:10:41 crc kubenswrapper[4953]: I1011 04:10:41.703345 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-stnj5" event={"ID":"9c59fd10-df3c-4c55-b274-858897ec7d93","Type":"ContainerDied","Data":"75b9552e9ae46c110c62d3ef4731328e09508f46f8bfed43a3c45a761c8dbece"}
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.818446 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.848191 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-stnj5"]
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.857343 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-stnj5"]
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.960481 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5llv\" (UniqueName: \"kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv\") pod \"9c59fd10-df3c-4c55-b274-858897ec7d93\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") "
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.960726 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host\") pod \"9c59fd10-df3c-4c55-b274-858897ec7d93\" (UID: \"9c59fd10-df3c-4c55-b274-858897ec7d93\") "
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.960797 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host" (OuterVolumeSpecName: "host") pod "9c59fd10-df3c-4c55-b274-858897ec7d93" (UID: "9c59fd10-df3c-4c55-b274-858897ec7d93"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.961139 4953 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c59fd10-df3c-4c55-b274-858897ec7d93-host\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:42 crc kubenswrapper[4953]: I1011 04:10:42.965523 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv" (OuterVolumeSpecName: "kube-api-access-m5llv") pod "9c59fd10-df3c-4c55-b274-858897ec7d93" (UID: "9c59fd10-df3c-4c55-b274-858897ec7d93"). InnerVolumeSpecName "kube-api-access-m5llv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:10:43 crc kubenswrapper[4953]: I1011 04:10:43.062970 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5llv\" (UniqueName: \"kubernetes.io/projected/9c59fd10-df3c-4c55-b274-858897ec7d93-kube-api-access-m5llv\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:43 crc kubenswrapper[4953]: I1011 04:10:43.720071 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb13e1b8b6076ca8ce4c763e28a011da7dc48fc362c6bdb3dc8bc815f2a74a92"
Oct 11 04:10:43 crc kubenswrapper[4953]: I1011 04:10:43.720183 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-stnj5"
Oct 11 04:10:43 crc kubenswrapper[4953]: I1011 04:10:43.825483 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c59fd10-df3c-4c55-b274-858897ec7d93" path="/var/lib/kubelet/pods/9c59fd10-df3c-4c55-b274-858897ec7d93/volumes"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.074797 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-6zpkm"]
Oct 11 04:10:44 crc kubenswrapper[4953]: E1011 04:10:44.075183 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="extract-utilities"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.075870 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="extract-utilities"
Oct 11 04:10:44 crc kubenswrapper[4953]: E1011 04:10:44.075894 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="extract-content"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.075902 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="extract-content"
Oct 11 04:10:44 crc kubenswrapper[4953]: E1011 04:10:44.075958 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="registry-server"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.075966 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="registry-server"
Oct 11 04:10:44 crc kubenswrapper[4953]: E1011 04:10:44.075981 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c59fd10-df3c-4c55-b274-858897ec7d93" containerName="container-00"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.075987 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c59fd10-df3c-4c55-b274-858897ec7d93" containerName="container-00"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.076276 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cb842f-2694-4c50-b8cb-ac3c71c9fa57" containerName="registry-server"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.076310 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c59fd10-df3c-4c55-b274-858897ec7d93" containerName="container-00"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.076911 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.196843 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9znbt\" (UniqueName: \"kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.196886 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.298501 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9znbt\" (UniqueName: \"kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.298548 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.298718 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.318572 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9znbt\" (UniqueName: \"kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt\") pod \"crc-debug-6zpkm\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") " pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.405832 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.728068 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm" event={"ID":"fd955e03-3757-4088-ba61-9083a4fb71d1","Type":"ContainerStarted","Data":"f1b66006e5e108eec9e48459b2d524206fe6bfd112d647eb1a7cf550676de7a7"}
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.728418 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm" event={"ID":"fd955e03-3757-4088-ba61-9083a4fb71d1","Type":"ContainerStarted","Data":"8a0f38e7433691541557cb8136c8741ec912c751aee30cb5bdd6d0c6e7481e85"}
Oct 11 04:10:44 crc kubenswrapper[4953]: I1011 04:10:44.746205 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm" podStartSLOduration=0.746188987 podStartE2EDuration="746.188987ms" podCreationTimestamp="2025-10-11 04:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-11 04:10:44.739402326 +0000 UTC m=+5055.672489970" watchObservedRunningTime="2025-10-11 04:10:44.746188987 +0000 UTC m=+5055.679276621"
Oct 11 04:10:45 crc kubenswrapper[4953]: I1011 04:10:45.736101 4953 generic.go:334] "Generic (PLEG): container finished" podID="fd955e03-3757-4088-ba61-9083a4fb71d1" containerID="f1b66006e5e108eec9e48459b2d524206fe6bfd112d647eb1a7cf550676de7a7" exitCode=0
Oct 11 04:10:45 crc kubenswrapper[4953]: I1011 04:10:45.736195 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm" event={"ID":"fd955e03-3757-4088-ba61-9083a4fb71d1","Type":"ContainerDied","Data":"f1b66006e5e108eec9e48459b2d524206fe6bfd112d647eb1a7cf550676de7a7"}
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.084181 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.192131 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-6zpkm"]
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.199773 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-6zpkm"]
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.254314 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host\") pod \"fd955e03-3757-4088-ba61-9083a4fb71d1\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") "
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.254411 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host" (OuterVolumeSpecName: "host") pod "fd955e03-3757-4088-ba61-9083a4fb71d1" (UID: "fd955e03-3757-4088-ba61-9083a4fb71d1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.256565 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9znbt\" (UniqueName: \"kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt\") pod \"fd955e03-3757-4088-ba61-9083a4fb71d1\" (UID: \"fd955e03-3757-4088-ba61-9083a4fb71d1\") "
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.257554 4953 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd955e03-3757-4088-ba61-9083a4fb71d1-host\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.272338 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt" (OuterVolumeSpecName: "kube-api-access-9znbt") pod "fd955e03-3757-4088-ba61-9083a4fb71d1" (UID: "fd955e03-3757-4088-ba61-9083a4fb71d1"). InnerVolumeSpecName "kube-api-access-9znbt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.359925 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9znbt\" (UniqueName: \"kubernetes.io/projected/fd955e03-3757-4088-ba61-9083a4fb71d1-kube-api-access-9znbt\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.759270 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a0f38e7433691541557cb8136c8741ec912c751aee30cb5bdd6d0c6e7481e85"
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.759344 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-6zpkm"
Oct 11 04:10:47 crc kubenswrapper[4953]: I1011 04:10:47.806463 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd955e03-3757-4088-ba61-9083a4fb71d1" path="/var/lib/kubelet/pods/fd955e03-3757-4088-ba61-9083a4fb71d1/volumes"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.358632 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-kpl4c"]
Oct 11 04:10:48 crc kubenswrapper[4953]: E1011 04:10:48.359100 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd955e03-3757-4088-ba61-9083a4fb71d1" containerName="container-00"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.359116 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd955e03-3757-4088-ba61-9083a4fb71d1" containerName="container-00"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.359320 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd955e03-3757-4088-ba61-9083a4fb71d1" containerName="container-00"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.359932 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.482030 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.482327 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdpmv\" (UniqueName: \"kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.583952 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdpmv\" (UniqueName: \"kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.584144 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.584227 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.965975 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdpmv\" (UniqueName: \"kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv\") pod \"crc-debug-kpl4c\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") " pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:48 crc kubenswrapper[4953]: I1011 04:10:48.977122 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:49 crc kubenswrapper[4953]: W1011 04:10:49.013392 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67033419_483f_4e32_8422_744ecc9c0270.slice/crio-4306179302b5c3c21dc0ba6e6302d4a707b5b47a2b09fb907d55297d6eec338b WatchSource:0}: Error finding container 4306179302b5c3c21dc0ba6e6302d4a707b5b47a2b09fb907d55297d6eec338b: Status 404 returned error can't find the container with id 4306179302b5c3c21dc0ba6e6302d4a707b5b47a2b09fb907d55297d6eec338b
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.785005 4953 generic.go:334] "Generic (PLEG): container finished" podID="67033419-483f-4e32-8422-744ecc9c0270" containerID="50cfb568ec1ed0c62496819b49ac898877d01bb9d806ae3bb88ad23a5a88ad15" exitCode=0
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.785145 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c" event={"ID":"67033419-483f-4e32-8422-744ecc9c0270","Type":"ContainerDied","Data":"50cfb568ec1ed0c62496819b49ac898877d01bb9d806ae3bb88ad23a5a88ad15"}
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.785584 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c" event={"ID":"67033419-483f-4e32-8422-744ecc9c0270","Type":"ContainerStarted","Data":"4306179302b5c3c21dc0ba6e6302d4a707b5b47a2b09fb907d55297d6eec338b"}
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.797236 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:10:49 crc kubenswrapper[4953]: E1011 04:10:49.797768 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.832636 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-kpl4c"]
Oct 11 04:10:49 crc kubenswrapper[4953]: I1011 04:10:49.842575 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sp9vl/crc-debug-kpl4c"]
Oct 11 04:10:50 crc kubenswrapper[4953]: I1011 04:10:50.952568 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.035872 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdpmv\" (UniqueName: \"kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv\") pod \"67033419-483f-4e32-8422-744ecc9c0270\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") "
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.036181 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host\") pod \"67033419-483f-4e32-8422-744ecc9c0270\" (UID: \"67033419-483f-4e32-8422-744ecc9c0270\") "
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.036407 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host" (OuterVolumeSpecName: "host") pod "67033419-483f-4e32-8422-744ecc9c0270" (UID: "67033419-483f-4e32-8422-744ecc9c0270"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.037030 4953 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67033419-483f-4e32-8422-744ecc9c0270-host\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.040718 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv" (OuterVolumeSpecName: "kube-api-access-cdpmv") pod "67033419-483f-4e32-8422-744ecc9c0270" (UID: "67033419-483f-4e32-8422-744ecc9c0270"). InnerVolumeSpecName "kube-api-access-cdpmv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.138669 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdpmv\" (UniqueName: \"kubernetes.io/projected/67033419-483f-4e32-8422-744ecc9c0270-kube-api-access-cdpmv\") on node \"crc\" DevicePath \"\""
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.804402 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c"
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.807225 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67033419-483f-4e32-8422-744ecc9c0270" path="/var/lib/kubelet/pods/67033419-483f-4e32-8422-744ecc9c0270/volumes"
Oct 11 04:10:51 crc kubenswrapper[4953]: I1011 04:10:51.807932 4953 scope.go:117] "RemoveContainer" containerID="50cfb568ec1ed0c62496819b49ac898877d01bb9d806ae3bb88ad23a5a88ad15"
Oct 11 04:10:52 crc kubenswrapper[4953]: I1011 04:10:52.812057 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ccd664cc4-t4w8m_b9eef66c-5177-4fe9-922f-099e01797490/barbican-api/0.log"
Oct 11 04:10:52 crc kubenswrapper[4953]: I1011 04:10:52.994400 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5ccd664cc4-t4w8m_b9eef66c-5177-4fe9-922f-099e01797490/barbican-api-log/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.073773 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-55994c8b46-9zhbn_bbf6cb99-5239-4d59-8710-ecb946f343ac/barbican-keystone-listener/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.293343 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-55994c8b46-9zhbn_bbf6cb99-5239-4d59-8710-ecb946f343ac/barbican-keystone-listener-log/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.322392 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-57d964cfc9-nsdbv_321fd9ba-894b-4da7-a3eb-a9052645f13b/barbican-worker/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.480560 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-57d964cfc9-nsdbv_321fd9ba-894b-4da7-a3eb-a9052645f13b/barbican-worker-log/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.516266 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-lpr5s_c4fa87fc-5064-4fd7-93d7-08ee6d0428bd/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.725840 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d1608d80-9530-49ad-9bd2-43d2865e6eeb/ceilometer-central-agent/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.782995 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d1608d80-9530-49ad-9bd2-43d2865e6eeb/ceilometer-notification-agent/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.931098 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d1608d80-9530-49ad-9bd2-43d2865e6eeb/proxy-httpd/0.log"
Oct 11 04:10:53 crc kubenswrapper[4953]: I1011 04:10:53.943731 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d1608d80-9530-49ad-9bd2-43d2865e6eeb/sg-core/0.log"
Oct 11 04:10:54 crc kubenswrapper[4953]: I1011 04:10:54.108076 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-2tphq_1dfa1f56-823a-4c82-a73c-5eb550c9f00b/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:54 crc kubenswrapper[4953]: I1011 04:10:54.238162 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-9scwq_313dfcf2-f0a4-452e-9f91-dcc5ba79b581/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:55 crc kubenswrapper[4953]: I1011 04:10:55.321585 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c66ca321-9e90-4d55-a75f-a9cb60949914/cinder-api/0.log"
Oct 11 04:10:55 crc kubenswrapper[4953]: I1011 04:10:55.323615 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_c66ca321-9e90-4d55-a75f-a9cb60949914/cinder-api-log/0.log"
Oct 11 04:10:55 crc kubenswrapper[4953]: I1011 04:10:55.555986 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_0cb94931-13cd-42d6-89e5-0862980caa67/probe/0.log"
Oct 11 04:10:55 crc kubenswrapper[4953]: I1011 04:10:55.809321 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fa64359f-1864-4e68-8a0e-df12bf6a204b/cinder-scheduler/0.log"
Oct 11 04:10:55 crc kubenswrapper[4953]: I1011 04:10:55.817752 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_fa64359f-1864-4e68-8a0e-df12bf6a204b/probe/0.log"
Oct 11 04:10:56 crc kubenswrapper[4953]: I1011 04:10:56.027300 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_a32c6695-97c7-4d7a-86f7-7e1a6d736e56/probe/0.log"
Oct 11 04:10:56 crc kubenswrapper[4953]: I1011 04:10:56.209623 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-tz9n2_2329ccc0-b566-4f3e-a67a-3fc4c5df824a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.084874 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-955dp_9e449589-3a86-4765-b844-ff6acbb3edf1/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.264893 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-76b5fdb995-x4cwh_c829e7d9-35e6-4927-8a22-0eee6c0e4846/init/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.471761 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-76b5fdb995-x4cwh_c829e7d9-35e6-4927-8a22-0eee6c0e4846/init/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.613242 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-76b5fdb995-x4cwh_c829e7d9-35e6-4927-8a22-0eee6c0e4846/dnsmasq-dns/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.812060 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_cd817825-541e-42b4-81dc-af9b352ce6c0/glance-httpd/0.log"
Oct 11 04:10:57 crc kubenswrapper[4953]: I1011 04:10:57.884190 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_cd817825-541e-42b4-81dc-af9b352ce6c0/glance-log/0.log"
Oct 11 04:10:58 crc kubenswrapper[4953]: I1011 04:10:58.117874 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0746b898-8459-41c7-b3e8-251b5b3a2412/glance-httpd/0.log"
Oct 11 04:10:58 crc kubenswrapper[4953]: I1011 04:10:58.723188 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_0cb94931-13cd-42d6-89e5-0862980caa67/cinder-backup/0.log"
Oct 11 04:10:58 crc kubenswrapper[4953]: I1011 04:10:58.736775 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_0746b898-8459-41c7-b3e8-251b5b3a2412/glance-log/0.log"
Oct 11 04:10:59 crc kubenswrapper[4953]: I1011 04:10:59.055696 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d5ffc79dd-kqmlf_d9aab956-be2a-4013-9071-a9cfb31c7da2/horizon/0.log"
Oct 11 04:10:59 crc kubenswrapper[4953]: I1011 04:10:59.255379 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-45464_b52f2621-2d4c-4196-9ede-38eede157dd9/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:59 crc kubenswrapper[4953]: I1011 04:10:59.314950 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d5ffc79dd-kqmlf_d9aab956-be2a-4013-9071-a9cfb31c7da2/horizon-log/0.log"
Oct 11 04:10:59 crc kubenswrapper[4953]: I1011 04:10:59.607367 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-d6b95_b64a4f35-42a0-4514-9da9-3319df4a9c6e/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:10:59 crc kubenswrapper[4953]: I1011 04:10:59.836961 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29335921-stdzs_e81fc29c-6896-43b1-b77f-a03449849738/keystone-cron/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.093314 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_b2036add-c9d0-433b-ad33-c007af59c686/kube-state-metrics/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.316511 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-fldp8_43a258fe-2816-4db2-9332-a340941a8b9b/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.469734 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6754fbff86-gtfd6_33a658a1-a813-4fca-bfec-0ed6aef2e124/keystone-api/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.677705 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_7eabdab5-7b5e-46f1-a558-6b7f4f25ae75/manila-api/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.691047 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_7eabdab5-7b5e-46f1-a558-6b7f4f25ae75/manila-api-log/0.log"
Oct 11 04:11:00 crc kubenswrapper[4953]: I1011 04:11:00.924341 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_3e55ad88-6f5e-4510-a9c7-25762b58aac0/probe/0.log"
Oct 11 04:11:01 crc kubenswrapper[4953]: I1011 04:11:01.044125 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_3e55ad88-6f5e-4510-a9c7-25762b58aac0/manila-scheduler/0.log"
Oct 11 04:11:01 crc kubenswrapper[4953]: I1011 04:11:01.173569 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_4dcedb20-4ac9-4bbf-bece-be0f325d98df/manila-share/0.log"
Oct 11 04:11:01 crc kubenswrapper[4953]: I1011 04:11:01.187641 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_4dcedb20-4ac9-4bbf-bece-be0f325d98df/probe/0.log"
Oct 11 04:11:01 crc kubenswrapper[4953]: I1011 04:11:01.837529 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5658f8676c-24292_f70d84e8-e7dc-41bc-ad84-227d742b8eae/neutron-httpd/0.log"
Oct 11 04:11:01 crc kubenswrapper[4953]: I1011 04:11:01.853986 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5658f8676c-24292_f70d84e8-e7dc-41bc-ad84-227d742b8eae/neutron-api/0.log"
Oct 11 04:11:02 crc kubenswrapper[4953]: I1011 04:11:02.060550 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-gfkkd_17997509-4c59-4295-ac71-e5509fbf1425/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.095061 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_275ac8e9-c059-44b2-814d-8e435b228b94/nova-api-log/0.log"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.398156 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_275ac8e9-c059-44b2-814d-8e435b228b94/nova-api-api/0.log"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.680286 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b207f8c3-a1f3-4d56-b506-07371e512f23/nova-cell0-conductor-conductor/0.log"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.795351 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf"
Oct 11 04:11:03 crc kubenswrapper[4953]: E1011 04:11:03.795626 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.894812 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_79e7d0fa-d4c5-485f-be5e-06d94de4e604/nova-cell1-conductor-conductor/0.log"
Oct 11 04:11:03 crc kubenswrapper[4953]: I1011 04:11:03.939174 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_a32c6695-97c7-4d7a-86f7-7e1a6d736e56/cinder-volume/0.log"
Oct 11 04:11:04 crc kubenswrapper[4953]: I1011 04:11:04.164453 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_6eb51273-e586-481d-b374-82863923b150/nova-cell1-novncproxy-novncproxy/0.log"
Oct 11 04:11:04 crc kubenswrapper[4953]: I1011 04:11:04.199042 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-h9fhj_f779c8e1-e272-4dc8-b907-8b6e6ac836ef/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 11 04:11:04 crc kubenswrapper[4953]: I1011 04:11:04.442266 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_afbe51ae-4d91-446a-b027-806a760887e0/nova-metadata-log/0.log"
Oct 11 04:11:04 crc kubenswrapper[4953]: I1011 04:11:04.847300 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_f3ec169a-6416-41d0-bb28-1d14dd2e7dc6/nova-scheduler-scheduler/0.log"
Oct 11 04:11:04 crc kubenswrapper[4953]: I1011 04:11:04.998520 4953 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_openstack-cell1-galera-0_f356e594-4357-488b-8b0a-a549d0a04531/mysql-bootstrap/0.log" Oct 11 04:11:05 crc kubenswrapper[4953]: I1011 04:11:05.187978 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f356e594-4357-488b-8b0a-a549d0a04531/mysql-bootstrap/0.log" Oct 11 04:11:05 crc kubenswrapper[4953]: I1011 04:11:05.201467 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f356e594-4357-488b-8b0a-a549d0a04531/galera/0.log" Oct 11 04:11:05 crc kubenswrapper[4953]: I1011 04:11:05.407480 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_179f5065-b743-428e-af8e-9e95fb0ea966/mysql-bootstrap/0.log" Oct 11 04:11:05 crc kubenswrapper[4953]: I1011 04:11:05.635176 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_179f5065-b743-428e-af8e-9e95fb0ea966/galera/0.log" Oct 11 04:11:05 crc kubenswrapper[4953]: I1011 04:11:05.642355 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_179f5065-b743-428e-af8e-9e95fb0ea966/mysql-bootstrap/0.log" Oct 11 04:11:06 crc kubenswrapper[4953]: I1011 04:11:06.408937 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_afbe51ae-4d91-446a-b027-806a760887e0/nova-metadata-metadata/0.log" Oct 11 04:11:06 crc kubenswrapper[4953]: I1011 04:11:06.535767 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_18487b9f-9425-4d2e-ab10-e1d1974783db/openstackclient/0.log" Oct 11 04:11:06 crc kubenswrapper[4953]: I1011 04:11:06.892686 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-2mxr7_e845a96b-97b5-4417-be95-7a4760a84897/ovn-controller/0.log" Oct 11 04:11:07 crc kubenswrapper[4953]: I1011 04:11:07.043167 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-mbwl2_9284d870-067e-4af0-98b7-b57e976c7a91/openstack-network-exporter/0.log" Oct 11 04:11:07 crc kubenswrapper[4953]: I1011 04:11:07.254090 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-w92fd_bae4146e-934f-4986-ba64-20add72d9c12/ovsdb-server-init/0.log" Oct 11 04:11:07 crc kubenswrapper[4953]: I1011 04:11:07.431614 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-w92fd_bae4146e-934f-4986-ba64-20add72d9c12/ovsdb-server/0.log" Oct 11 04:11:07 crc kubenswrapper[4953]: I1011 04:11:07.441036 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-w92fd_bae4146e-934f-4986-ba64-20add72d9c12/ovsdb-server-init/0.log" Oct 11 04:11:07 crc kubenswrapper[4953]: I1011 04:11:07.475266 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-w92fd_bae4146e-934f-4986-ba64-20add72d9c12/ovs-vswitchd/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.247831 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-bvf55_5b2e7afe-517e-473b-b1f3-3ab040d6eca4/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.421389 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_5d8bc190-9bc5-4b0c-8592-6b31362c4783/openstack-network-exporter/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.458297 4953 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-northd-0_5d8bc190-9bc5-4b0c-8592-6b31362c4783/ovn-northd/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.659114 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ccfff6f8-4954-46d7-ba26-c317b321a169/openstack-network-exporter/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.673750 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ccfff6f8-4954-46d7-ba26-c317b321a169/ovsdbserver-nb/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.812682 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_127eaeb9-abc1-44ee-90b7-07bbf0a85837/openstack-network-exporter/0.log" Oct 11 04:11:08 crc kubenswrapper[4953]: I1011 04:11:08.867811 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_127eaeb9-abc1-44ee-90b7-07bbf0a85837/ovsdbserver-sb/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.113493 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-766db79b7b-s7l54_d6fea745-5eaa-47d2-b039-7ef9e1efd8f5/placement-api/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.214384 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-766db79b7b-s7l54_d6fea745-5eaa-47d2-b039-7ef9e1efd8f5/placement-log/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.317720 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e/setup-container/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.507980 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e/rabbitmq/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.523418 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7740b4f1-3fe0-4b7b-9bf4-f7625ed8090e/setup-container/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.671593 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_712b86c6-78ab-4ce9-96b7-4a627619f79b/setup-container/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.854597 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_712b86c6-78ab-4ce9-96b7-4a627619f79b/setup-container/0.log" Oct 11 04:11:09 crc kubenswrapper[4953]: I1011 04:11:09.858348 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_712b86c6-78ab-4ce9-96b7-4a627619f79b/rabbitmq/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.073644 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-k2gl5_7c3615eb-2efa-4cbc-9eb0-c207f6d322ca/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.076905 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-6tvv2_e20566a0-5f44-415c-9364-6b64a89f82e8/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.242016 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-bmjbs_5b1bf278-87d7-4410-9cdb-54583890f234/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 
11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.368700 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-5jd92_1f22d2a9-5546-4397-816f-000c90554995/ssh-known-hosts-edpm-deployment/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.535720 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_821b6cab-651b-4ae4-a477-35ddb4641b8b/test-operator-logs-container/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.538760 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_7c343bb4-c2ce-4f20-b2dc-eaafca2b1d79/tempest-tests-tempest-tests-runner/0.log" Oct 11 04:11:10 crc kubenswrapper[4953]: I1011 04:11:10.728664 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-fzcrw_bf89bcbf-9d91-4ce7-8919-9b0c47b5d498/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 11 04:11:18 crc kubenswrapper[4953]: I1011 04:11:18.795789 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:11:18 crc kubenswrapper[4953]: E1011 04:11:18.797255 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:11:21 crc kubenswrapper[4953]: I1011 04:11:21.304566 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_cdc88039-fb87-4db1-a4fc-c808a8a7b70d/memcached/0.log" Oct 11 04:11:21 crc kubenswrapper[4953]: I1011 04:11:21.818762 4953 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod67033419-483f-4e32-8422-744ecc9c0270"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod67033419-483f-4e32-8422-744ecc9c0270] : Timed out while waiting for systemd to remove kubepods-besteffort-pod67033419_483f_4e32_8422_744ecc9c0270.slice" Oct 11 04:11:21 crc kubenswrapper[4953]: E1011 04:11:21.819090 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod67033419-483f-4e32-8422-744ecc9c0270] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod67033419-483f-4e32-8422-744ecc9c0270] : Timed out while waiting for systemd to remove kubepods-besteffort-pod67033419_483f_4e32_8422_744ecc9c0270.slice" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c" podUID="67033419-483f-4e32-8422-744ecc9c0270" Oct 11 04:11:22 crc kubenswrapper[4953]: I1011 04:11:22.101988 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sp9vl/crc-debug-kpl4c" Oct 11 04:11:32 crc kubenswrapper[4953]: I1011 04:11:32.796051 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:11:32 crc kubenswrapper[4953]: E1011 04:11:32.797080 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:11:35 crc kubenswrapper[4953]: I1011 04:11:35.819131 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-mndql_c7b14405-4593-4f33-99a7-d40ce066518e/kube-rbac-proxy/0.log" Oct 11 04:11:35 crc kubenswrapper[4953]: I1011 04:11:35.881756 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f84fcdbb-mndql_c7b14405-4593-4f33-99a7-d40ce066518e/manager/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.011652 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c565d56b4-xjd8m_0f923b33-4a14-4290-90f7-7e6cee41df34/kube-rbac-proxy/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.076943 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c565d56b4-xjd8m_0f923b33-4a14-4290-90f7-7e6cee41df34/manager/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.209724 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-tmbxg_86c53eae-1329-4321-86d2-80b140234b48/kube-rbac-proxy/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.212423 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-687df44cdb-tmbxg_86c53eae-1329-4321-86d2-80b140234b48/manager/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.287452 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/util/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.450834 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/pull/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.456231 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/pull/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.461277 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/util/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.636280 4953 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/util/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.644213 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/pull/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.657720 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e0524d3f2b121197ac60994835da1309e1750fa62f78e78f15e9e96b275rqhm_7fb71986-972b-4cd1-a70c-03573869886e/extract/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.807239 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-sg8ck_6fdfc46d-0e5a-4b00-bc31-67beabb8c089/kube-rbac-proxy/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.867291 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7bb46cd7d-sg8ck_6fdfc46d-0e5a-4b00-bc31-67beabb8c089/manager/0.log" Oct 11 04:11:36 crc kubenswrapper[4953]: I1011 04:11:36.908000 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-zwhzq_afa4c58f-9d37-43b6-a7f7-a9d75b68c39c/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.003626 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-6d9967f8dd-zwhzq_afa4c58f-9d37-43b6-a7f7-a9d75b68c39c/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.026662 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-5cfjb_91516d61-8792-4b37-aa49-d72705bae472/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.093244 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d74794d9b-5cfjb_91516d61-8792-4b37-aa49-d72705bae472/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.214992 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-685c45897-wc8m8_a319207e-2833-4dd6-b9db-60ce94fd41af/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.381868 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-b6fwb_f9afccad-9780-49c8-a7f1-eea5cdf50239/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.396713 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-685c45897-wc8m8_a319207e-2833-4dd6-b9db-60ce94fd41af/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.425470 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-74cb5cbc49-b6fwb_f9afccad-9780-49c8-a7f1-eea5cdf50239/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.567675 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-4f4lp_a95dee0f-7e54-41f1-99d3-3df7a8554793/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.621433 4953 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-ddb98f99b-4f4lp_a95dee0f-7e54-41f1-99d3-3df7a8554793/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.751114 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-rgh45_94fd5831-788d-4f63-b40b-03f64a627450/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.813759 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-59578bc799-rgh45_94fd5831-788d-4f63-b40b-03f64a627450/manager/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.848912 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-dbpbb_360ca384-6921-424c-abed-01f6c7c0cf47/kube-rbac-proxy/0.log" Oct 11 04:11:37 crc kubenswrapper[4953]: I1011 04:11:37.960899 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5777b4f897-dbpbb_360ca384-6921-424c-abed-01f6c7c0cf47/manager/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.040445 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-sq5vt_66ec2823-3d89-4b72-81cf-1bb9d6cc4c49/kube-rbac-proxy/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.077333 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-797d478b46-sq5vt_66ec2823-3d89-4b72-81cf-1bb9d6cc4c49/manager/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.212199 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-2jcr8_5a21948c-c3c2-45c0-9d3e-9c6d36376990/kube-rbac-proxy/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.354935 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-57bb74c7bf-2jcr8_5a21948c-c3c2-45c0-9d3e-9c6d36376990/manager/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.371721 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-v8xgw_01523da6-2a81-4ce1-9724-0b2f85056158/kube-rbac-proxy/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.402003 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6d7c7ddf95-v8xgw_01523da6-2a81-4ce1-9724-0b2f85056158/manager/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.557036 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66_f3229387-8b22-43f8-a298-e9debe8c59eb/kube-rbac-proxy/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.585482 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6cc7fb757d9zj66_f3229387-8b22-43f8-a298-e9debe8c59eb/manager/0.log" Oct 11 04:11:38 crc kubenswrapper[4953]: I1011 04:11:38.684885 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6fcfdfbc78-45lc4_aa404280-7ac9-4da4-876d-a6fe37afc9af/kube-rbac-proxy/0.log" Oct 11 04:11:38 crc 
kubenswrapper[4953]: I1011 04:11:38.779348 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-546b8c9657-c9vtg_40ef9bae-babe-4b3b-920b-7e0597df1221/kube-rbac-proxy/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.067846 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-wcw9m_b1afeedb-2499-481f-be28-8e17c9857592/registry-server/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.139639 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-546b8c9657-c9vtg_40ef9bae-babe-4b3b-920b-7e0597df1221/operator/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.328827 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-ldt8v_ca2c518c-b96a-45f8-bb26-48e13c7a4a13/kube-rbac-proxy/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.429695 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-869cc7797f-ldt8v_ca2c518c-b96a-45f8-bb26-48e13c7a4a13/manager/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.522010 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-9z7s6_4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0/kube-rbac-proxy/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.540545 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-664664cb68-9z7s6_4d2e6b80-9c0b-48af-ae86-dc6ec6a06cd0/manager/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.740060 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-fvs5m_b98a27df-76fb-4192-9bf2-fa4c4603cbdc/kube-rbac-proxy/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.809061 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-xbgjd_b94cb011-c2e4-4a1e-a75e-5198d4f76dbc/operator/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.889922 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6fcfdfbc78-45lc4_aa404280-7ac9-4da4-876d-a6fe37afc9af/manager/0.log" Oct 11 04:11:39 crc kubenswrapper[4953]: I1011 04:11:39.927854 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f4d5dfdc6-fvs5m_b98a27df-76fb-4192-9bf2-fa4c4603cbdc/manager/0.log" Oct 11 04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.034674 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x526q_ddf675ff-4cfc-4217-8d3c-4138595de655/kube-rbac-proxy/0.log" Oct 11 04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.048981 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-578874c84d-x526q_ddf675ff-4cfc-4217-8d3c-4138595de655/manager/0.log" Oct 11 04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.127588 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-hc9rc_9ff05a66-37cc-44c4-b575-97568a1ef285/kube-rbac-proxy/0.log" Oct 11 
04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.140162 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-ffcdd6c94-hc9rc_9ff05a66-37cc-44c4-b575-97568a1ef285/manager/0.log" Oct 11 04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.224021 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-n7g7d_2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4/kube-rbac-proxy/0.log" Oct 11 04:11:40 crc kubenswrapper[4953]: I1011 04:11:40.274261 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-646675d848-n7g7d_2b066581-4e9b-4e8e-8cfe-c68d0d05dbb4/manager/0.log" Oct 11 04:11:43 crc kubenswrapper[4953]: I1011 04:11:43.796149 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:11:43 crc kubenswrapper[4953]: E1011 04:11:43.797235 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:11:56 crc kubenswrapper[4953]: I1011 04:11:56.721389 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-qcfl9_80bba440-e5e2-4c21-befa-59ef185a7295/control-plane-machine-set-operator/0.log" Oct 11 04:11:56 crc kubenswrapper[4953]: I1011 04:11:56.795770 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:11:56 crc kubenswrapper[4953]: E1011 04:11:56.796261 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:11:56 crc kubenswrapper[4953]: I1011 04:11:56.889307 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fg6dt_78f5dc04-9b51-442a-b90f-59aa2145c73b/kube-rbac-proxy/0.log" Oct 11 04:11:56 crc kubenswrapper[4953]: I1011 04:11:56.940494 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fg6dt_78f5dc04-9b51-442a-b90f-59aa2145c73b/machine-api-operator/0.log" Oct 11 04:12:09 crc kubenswrapper[4953]: I1011 04:12:09.990010 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hvzfk_4322ff87-c450-47d6-81e1-a5fb78efd7cb/cert-manager-controller/0.log" Oct 11 04:12:10 crc kubenswrapper[4953]: I1011 04:12:10.084020 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-mmh22_0e0ba7b5-bed4-40fc-bc3e-82fab6e73d10/cert-manager-cainjector/0.log" Oct 11 04:12:10 crc kubenswrapper[4953]: I1011 04:12:10.180172 4953 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-zgb5r_94a49212-5e10-4033-99db-7d55789632e3/cert-manager-webhook/0.log" Oct 11 04:12:10 crc kubenswrapper[4953]: I1011 04:12:10.797185 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:12:10 crc kubenswrapper[4953]: E1011 04:12:10.797796 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:12:21 crc kubenswrapper[4953]: I1011 04:12:21.692587 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-72xj8_32b4d8a6-f845-41f4-808f-2d8ecd8f4b83/nmstate-console-plugin/0.log" Oct 11 04:12:21 crc kubenswrapper[4953]: I1011 04:12:21.886180 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-blssq_61d0daba-b765-4ae3-83e7-68cb0c06e759/nmstate-handler/0.log" Oct 11 04:12:21 crc kubenswrapper[4953]: I1011 04:12:21.938023 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-n6n9m_588d5eb1-6c32-47f6-a332-6d07684c7381/kube-rbac-proxy/0.log" Oct 11 04:12:21 crc kubenswrapper[4953]: I1011 04:12:21.947196 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-n6n9m_588d5eb1-6c32-47f6-a332-6d07684c7381/nmstate-metrics/0.log" Oct 11 04:12:22 crc kubenswrapper[4953]: I1011 04:12:22.125465 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-8s6t4_017885e6-ffff-4db6-85eb-bc0fb03170ac/nmstate-webhook/0.log" Oct 11 04:12:22 crc kubenswrapper[4953]: I1011 04:12:22.131233 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-b5h2v_37deec98-39ac-4e7d-80dd-5dd431108ea8/nmstate-operator/0.log" Oct 11 04:12:25 crc kubenswrapper[4953]: I1011 04:12:25.797876 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:12:25 crc kubenswrapper[4953]: E1011 04:12:25.799390 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.742113 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:33 crc kubenswrapper[4953]: E1011 04:12:33.743096 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67033419-483f-4e32-8422-744ecc9c0270" containerName="container-00" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.743108 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="67033419-483f-4e32-8422-744ecc9c0270" containerName="container-00" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.743279 4953 
memory_manager.go:354] "RemoveStaleState removing state" podUID="67033419-483f-4e32-8422-744ecc9c0270" containerName="container-00" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.744664 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.752169 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.826043 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k277j\" (UniqueName: \"kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.826129 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.826197 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.927422 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k277j\" (UniqueName: \"kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.927514 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.927591 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.928262 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.928445 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:33 crc kubenswrapper[4953]: I1011 04:12:33.961430 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k277j\" (UniqueName: \"kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j\") pod \"community-operators-pgffp\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:34 crc kubenswrapper[4953]: I1011 04:12:34.074997 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:34 crc kubenswrapper[4953]: I1011 04:12:34.573111 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:34 crc kubenswrapper[4953]: I1011 04:12:34.736115 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerStarted","Data":"67150fcd11e8a6e4f8ed0cf05f0e8164d89e7e3a4fd181aa7164b0f18bf52c77"} Oct 11 04:12:35 crc kubenswrapper[4953]: I1011 04:12:35.745897 4953 generic.go:334] "Generic (PLEG): container finished" podID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerID="fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd" exitCode=0 Oct 11 04:12:35 crc kubenswrapper[4953]: I1011 04:12:35.745975 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerDied","Data":"fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd"} Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.200478 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gfm9p_9332601a-ba82-4e65-b207-f2449666e8e3/kube-rbac-proxy/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.300365 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-gfm9p_9332601a-ba82-4e65-b207-f2449666e8e3/controller/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.383820 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-frr-files/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.572978 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-reloader/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.598948 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-frr-files/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.598955 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-metrics/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.605511 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-reloader/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.756617 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerStarted","Data":"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f"} Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.819911 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-metrics/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.853154 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-reloader/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.897085 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-frr-files/0.log" Oct 11 04:12:36 crc kubenswrapper[4953]: I1011 04:12:36.908545 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-metrics/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.031928 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-frr-files/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.062279 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-reloader/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.066911 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/cp-metrics/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.109734 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/controller/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.274650 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/frr-metrics/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.289224 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/kube-rbac-proxy/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.348007 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/kube-rbac-proxy-frr/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.516803 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/reloader/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.586503 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-t96g6_290c040c-f640-487f-aca3-6c941d0b364b/frr-k8s-webhook-server/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.757475 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5b44f7b76f-hxtgz_456dd569-e9b3-4d3f-b5cd-524cc59e5c3c/manager/0.log" Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.766140 4953 generic.go:334] "Generic (PLEG): container finished" podID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerID="b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f" exitCode=0 Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 
04:12:37.766192 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerDied","Data":"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f"} Oct 11 04:12:37 crc kubenswrapper[4953]: I1011 04:12:37.795044 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:12:37 crc kubenswrapper[4953]: E1011 04:12:37.795477 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:12:38 crc kubenswrapper[4953]: I1011 04:12:38.207522 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-9cf975fdf-b4d8n_9905d9f2-d2da-44b2-92c6-fbf3420250ae/webhook-server/0.log" Oct 11 04:12:38 crc kubenswrapper[4953]: I1011 04:12:38.425483 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9mcll_d5b760ea-e0f2-4076-becf-3e9d0f416554/kube-rbac-proxy/0.log" Oct 11 04:12:38 crc kubenswrapper[4953]: I1011 04:12:38.789366 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerStarted","Data":"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7"} Oct 11 04:12:38 crc kubenswrapper[4953]: I1011 04:12:38.814824 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pgffp" podStartSLOduration=3.364620657 podStartE2EDuration="5.814808246s" podCreationTimestamp="2025-10-11 04:12:33 +0000 UTC" firstStartedPulling="2025-10-11 04:12:35.748248907 +0000 UTC m=+5166.681336551" lastFinishedPulling="2025-10-11 04:12:38.198436496 +0000 UTC m=+5169.131524140" observedRunningTime="2025-10-11 04:12:38.810416405 +0000 UTC m=+5169.743504049" watchObservedRunningTime="2025-10-11 04:12:38.814808246 +0000 UTC m=+5169.747895890" Oct 11 04:12:38 crc kubenswrapper[4953]: I1011 04:12:38.908021 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9mcll_d5b760ea-e0f2-4076-becf-3e9d0f416554/speaker/0.log" Oct 11 04:12:39 crc kubenswrapper[4953]: I1011 04:12:39.081999 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mpn4b_2219b4a8-f50c-4b3f-87b9-107fd5cb9256/frr/0.log" Oct 11 04:12:44 crc kubenswrapper[4953]: I1011 04:12:44.075959 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:44 crc kubenswrapper[4953]: I1011 04:12:44.076557 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:44 crc kubenswrapper[4953]: I1011 04:12:44.141356 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:44 crc kubenswrapper[4953]: I1011 04:12:44.896001 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pgffp" Oct 11 
04:12:44 crc kubenswrapper[4953]: I1011 04:12:44.941171 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:46 crc kubenswrapper[4953]: I1011 04:12:46.857051 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pgffp" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="registry-server" containerID="cri-o://a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7" gracePeriod=2 Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.330060 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.498414 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities\") pod \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.498501 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content\") pod \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.498562 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k277j\" (UniqueName: \"kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j\") pod \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\" (UID: \"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2\") " Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.500889 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities" (OuterVolumeSpecName: "utilities") pod "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" (UID: "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.509844 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j" (OuterVolumeSpecName: "kube-api-access-k277j") pod "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" (UID: "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2"). InnerVolumeSpecName "kube-api-access-k277j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.551372 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" (UID: "a159ab31-2ef0-4a80-8bf9-4a8a14d021c2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.600754 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.600792 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.600808 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k277j\" (UniqueName: \"kubernetes.io/projected/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2-kube-api-access-k277j\") on node \"crc\" DevicePath \"\"" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.870752 4953 generic.go:334] "Generic (PLEG): container finished" podID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerID="a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7" exitCode=0 Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.870793 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pgffp" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.870843 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerDied","Data":"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7"} Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.870900 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pgffp" event={"ID":"a159ab31-2ef0-4a80-8bf9-4a8a14d021c2","Type":"ContainerDied","Data":"67150fcd11e8a6e4f8ed0cf05f0e8164d89e7e3a4fd181aa7164b0f18bf52c77"} Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.870927 4953 scope.go:117] "RemoveContainer" containerID="a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.900672 4953 scope.go:117] "RemoveContainer" containerID="b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.902257 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.914306 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pgffp"] Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.933245 4953 scope.go:117] "RemoveContainer" containerID="fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.984029 4953 scope.go:117] "RemoveContainer" containerID="a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7" Oct 11 04:12:47 crc kubenswrapper[4953]: E1011 04:12:47.984514 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7\": container with ID starting with a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7 not found: ID does not exist" containerID="a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.984565 
4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7"} err="failed to get container status \"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7\": rpc error: code = NotFound desc = could not find container \"a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7\": container with ID starting with a21e2e43e912ca2ae15eb76ff081402ef60a9eb2d40e5712a94c6ddfeb6882b7 not found: ID does not exist" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.984584 4953 scope.go:117] "RemoveContainer" containerID="b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f" Oct 11 04:12:47 crc kubenswrapper[4953]: E1011 04:12:47.985422 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f\": container with ID starting with b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f not found: ID does not exist" containerID="b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.985463 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f"} err="failed to get container status \"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f\": rpc error: code = NotFound desc = could not find container \"b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f\": container with ID starting with b93bb136774955cf4d657b3b69f76a914aa1f310334f740f72e0c5b68c92033f not found: ID does not exist" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.985478 4953 scope.go:117] "RemoveContainer" containerID="fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd" Oct 11 04:12:47 crc kubenswrapper[4953]: E1011 04:12:47.985942 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd\": container with ID starting with fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd not found: ID does not exist" containerID="fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd" Oct 11 04:12:47 crc kubenswrapper[4953]: I1011 04:12:47.985991 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd"} err="failed to get container status \"fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd\": rpc error: code = NotFound desc = could not find container \"fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd\": container with ID starting with fee8dab0fbd075aeda080cb250b2016d6d0eb4025d2157494e0c8d7e396d54fd not found: ID does not exist" Oct 11 04:12:49 crc kubenswrapper[4953]: I1011 04:12:49.810222 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" path="/var/lib/kubelet/pods/a159ab31-2ef0-4a80-8bf9-4a8a14d021c2/volumes" Oct 11 04:12:51 crc kubenswrapper[4953]: I1011 04:12:51.739148 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/util/0.log" Oct 11 04:12:51 crc 
kubenswrapper[4953]: I1011 04:12:51.954271 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/util/0.log" Oct 11 04:12:51 crc kubenswrapper[4953]: I1011 04:12:51.988746 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/pull/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.013822 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/pull/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.186863 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/util/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.196636 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/pull/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.261924 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2vpjkl_1150ef11-aab8-4d20-a5de-8f5c6dffc76a/extract/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.369177 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-utilities/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.581481 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-content/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.596849 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-utilities/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.625372 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-content/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.792644 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-utilities/0.log" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.795870 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:12:52 crc kubenswrapper[4953]: E1011 04:12:52.796118 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:12:52 crc kubenswrapper[4953]: I1011 04:12:52.811776 4953 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/extract-content/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.014051 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-utilities/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.241339 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-utilities/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.245120 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-content/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.341235 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-content/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.388634 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8gl84_db5bd718-f43c-4c36-ba50-a6ac1853433f/registry-server/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.507808 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-content/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.556291 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/extract-utilities/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.744062 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/util/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.879948 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/pull/0.log" Oct 11 04:12:53 crc kubenswrapper[4953]: I1011 04:12:53.899770 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/util/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.029385 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/pull/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.210300 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/extract/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.229046 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/pull/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.230230 4953 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cx9v7l_b734ba5e-4396-4913-9cb3-fe335edce58f/util/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.403894 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-qn9wq_f024ccc7-4e04-47b6-83ee-05da1aa42bfb/registry-server/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.482574 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8tvwc_8dfc94ce-eb5a-4891-873a-3b44b9233e55/marketplace-operator/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.600125 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-utilities/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.805207 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-content/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.807530 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-content/0.log" Oct 11 04:12:54 crc kubenswrapper[4953]: I1011 04:12:54.808554 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-utilities/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.005976 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-utilities/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.009386 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/extract-content/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.116433 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-utilities/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.222896 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-l2cxf_6f42776d-bcaa-480d-8a98-f48bec6b587a/registry-server/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.310999 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-utilities/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.329944 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-content/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.352222 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-content/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.475725 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-utilities/0.log" Oct 11 04:12:55 crc kubenswrapper[4953]: I1011 04:12:55.511361 4953 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/extract-content/0.log" Oct 11 04:12:56 crc kubenswrapper[4953]: I1011 04:12:56.261803 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2kt72_e9c8e1d7-cd5b-4b03-a755-61bf082e781e/registry-server/0.log" Oct 11 04:13:03 crc kubenswrapper[4953]: I1011 04:13:03.795213 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:13:03 crc kubenswrapper[4953]: E1011 04:13:03.796000 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:13:13 crc kubenswrapper[4953]: E1011 04:13:13.357734 4953 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.110:38358->38.102.83.110:44925: write tcp 38.102.83.110:38358->38.102.83.110:44925: write: broken pipe Oct 11 04:13:14 crc kubenswrapper[4953]: I1011 04:13:14.795199 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:13:14 crc kubenswrapper[4953]: E1011 04:13:14.795695 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:13:27 crc kubenswrapper[4953]: I1011 04:13:27.795585 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:13:27 crc kubenswrapper[4953]: E1011 04:13:27.796330 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:13:29 crc kubenswrapper[4953]: E1011 04:13:29.587131 4953 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.110:44802->38.102.83.110:44925: write tcp 38.102.83.110:44802->38.102.83.110:44925: write: broken pipe Oct 11 04:13:42 crc kubenswrapper[4953]: I1011 04:13:42.796035 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:13:42 crc kubenswrapper[4953]: E1011 04:13:42.796901 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" 
podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:13:57 crc kubenswrapper[4953]: I1011 04:13:57.796018 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:13:57 crc kubenswrapper[4953]: E1011 04:13:57.796677 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:14:09 crc kubenswrapper[4953]: I1011 04:14:09.811683 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:14:09 crc kubenswrapper[4953]: E1011 04:14:09.812421 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:14:22 crc kubenswrapper[4953]: I1011 04:14:22.796804 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:14:22 crc kubenswrapper[4953]: E1011 04:14:22.797904 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:14:34 crc kubenswrapper[4953]: I1011 04:14:34.795576 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:14:34 crc kubenswrapper[4953]: E1011 04:14:34.797414 4953 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-9jz9g_openshift-machine-config-operator(a55d5e63-14a1-4d53-be84-21dce9f0c53d)\"" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" podUID="a55d5e63-14a1-4d53-be84-21dce9f0c53d" Oct 11 04:14:47 crc kubenswrapper[4953]: I1011 04:14:47.795996 4953 scope.go:117] "RemoveContainer" containerID="f8f42b71321a513ebbbc8aaacf114df2ff04a946ea9573d3ff764464a7d7a0cf" Oct 11 04:14:48 crc kubenswrapper[4953]: I1011 04:14:48.094716 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-9jz9g" event={"ID":"a55d5e63-14a1-4d53-be84-21dce9f0c53d","Type":"ContainerStarted","Data":"0c7ef88ac5eaeb7c5b1200cf105062fbb384c42065b0343dfea9a1b3e8a50002"} Oct 11 04:14:49 crc kubenswrapper[4953]: I1011 04:14:49.109297 4953 generic.go:334] "Generic (PLEG): container finished" podID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerID="844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219" exitCode=0 Oct 11 04:14:49 crc kubenswrapper[4953]: I1011 
04:14:49.109430 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-sp9vl/must-gather-7ts64" event={"ID":"ca5022b0-d6c5-4b55-919c-d70e623702cc","Type":"ContainerDied","Data":"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219"} Oct 11 04:14:49 crc kubenswrapper[4953]: I1011 04:14:49.110676 4953 scope.go:117] "RemoveContainer" containerID="844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219" Oct 11 04:14:49 crc kubenswrapper[4953]: I1011 04:14:49.513210 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sp9vl_must-gather-7ts64_ca5022b0-d6c5-4b55-919c-d70e623702cc/gather/0.log" Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.356570 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-sp9vl/must-gather-7ts64"] Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.357572 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-sp9vl/must-gather-7ts64" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="copy" containerID="cri-o://a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee" gracePeriod=2 Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.372559 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-sp9vl/must-gather-7ts64"] Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.813967 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sp9vl_must-gather-7ts64_ca5022b0-d6c5-4b55-919c-d70e623702cc/copy/0.log" Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.816332 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-sp9vl/must-gather-7ts64" Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.836246 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output\") pod \"ca5022b0-d6c5-4b55-919c-d70e623702cc\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.836345 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfgmn\" (UniqueName: \"kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn\") pod \"ca5022b0-d6c5-4b55-919c-d70e623702cc\" (UID: \"ca5022b0-d6c5-4b55-919c-d70e623702cc\") " Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.846790 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn" (OuterVolumeSpecName: "kube-api-access-jfgmn") pod "ca5022b0-d6c5-4b55-919c-d70e623702cc" (UID: "ca5022b0-d6c5-4b55-919c-d70e623702cc"). InnerVolumeSpecName "kube-api-access-jfgmn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:14:58 crc kubenswrapper[4953]: I1011 04:14:58.938685 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfgmn\" (UniqueName: \"kubernetes.io/projected/ca5022b0-d6c5-4b55-919c-d70e623702cc-kube-api-access-jfgmn\") on node \"crc\" DevicePath \"\"" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.005895 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ca5022b0-d6c5-4b55-919c-d70e623702cc" (UID: "ca5022b0-d6c5-4b55-919c-d70e623702cc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.041354 4953 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ca5022b0-d6c5-4b55-919c-d70e623702cc-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.222493 4953 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-sp9vl_must-gather-7ts64_ca5022b0-d6c5-4b55-919c-d70e623702cc/copy/0.log" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.223313 4953 generic.go:334] "Generic (PLEG): container finished" podID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerID="a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee" exitCode=143 Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.223375 4953 scope.go:117] "RemoveContainer" containerID="a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.223508 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-sp9vl/must-gather-7ts64" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.257071 4953 scope.go:117] "RemoveContainer" containerID="844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.357184 4953 scope.go:117] "RemoveContainer" containerID="a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee" Oct 11 04:14:59 crc kubenswrapper[4953]: E1011 04:14:59.357879 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee\": container with ID starting with a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee not found: ID does not exist" containerID="a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.357929 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee"} err="failed to get container status \"a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee\": rpc error: code = NotFound desc = could not find container \"a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee\": container with ID starting with a7d9cf990885723fbfa08a20ae0aff80ead4d3afbfb3b034fe3dfaca774921ee not found: ID does not exist" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.357963 4953 scope.go:117] "RemoveContainer" containerID="844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219" Oct 11 04:14:59 crc kubenswrapper[4953]: E1011 04:14:59.358259 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219\": container with ID starting with 844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219 not found: ID does not exist" containerID="844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.358284 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219"} err="failed to get container status \"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219\": rpc error: code = NotFound desc = could not find container \"844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219\": container with ID starting with 844ef0f19a8dbd0488ce8b537c73611ee14f96ed1a77226a03070aa1f7fa9219 not found: ID does not exist" Oct 11 04:14:59 crc kubenswrapper[4953]: I1011 04:14:59.811436 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" path="/var/lib/kubelet/pods/ca5022b0-d6c5-4b55-919c-d70e623702cc/volumes" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.148868 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg"] Oct 11 04:15:00 crc kubenswrapper[4953]: E1011 04:15:00.149270 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="extract-content" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149286 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" 
containerName="extract-content" Oct 11 04:15:00 crc kubenswrapper[4953]: E1011 04:15:00.149301 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="copy" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149307 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="copy" Oct 11 04:15:00 crc kubenswrapper[4953]: E1011 04:15:00.149324 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="registry-server" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149329 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="registry-server" Oct 11 04:15:00 crc kubenswrapper[4953]: E1011 04:15:00.149341 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="gather" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149346 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="gather" Oct 11 04:15:00 crc kubenswrapper[4953]: E1011 04:15:00.149369 4953 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="extract-utilities" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149375 4953 state_mem.go:107] "Deleted CPUSet assignment" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="extract-utilities" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149537 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="copy" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149560 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="a159ab31-2ef0-4a80-8bf9-4a8a14d021c2" containerName="registry-server" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.149574 4953 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca5022b0-d6c5-4b55-919c-d70e623702cc" containerName="gather" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.150161 4953 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.154969 4953 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.155104 4953 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.162624 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzzcs\" (UniqueName: \"kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.162696 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.162896 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.163344 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg"] Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.264419 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.264554 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzzcs\" (UniqueName: \"kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.264628 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.265545 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume\") pod 
\"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.278639 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.280180 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzzcs\" (UniqueName: \"kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs\") pod \"collect-profiles-29335935-bhzwg\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.469478 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:00 crc kubenswrapper[4953]: W1011 04:15:00.901473 4953 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10b6f91c_6120_4fe5_97c8_75e16c2274fc.slice/crio-0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492 WatchSource:0}: Error finding container 0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492: Status 404 returned error can't find the container with id 0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492 Oct 11 04:15:00 crc kubenswrapper[4953]: I1011 04:15:00.903042 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg"] Oct 11 04:15:01 crc kubenswrapper[4953]: I1011 04:15:01.241560 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" event={"ID":"10b6f91c-6120-4fe5-97c8-75e16c2274fc","Type":"ContainerStarted","Data":"0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492"} Oct 11 04:15:02 crc kubenswrapper[4953]: I1011 04:15:02.253248 4953 generic.go:334] "Generic (PLEG): container finished" podID="10b6f91c-6120-4fe5-97c8-75e16c2274fc" containerID="dba79282c9f030495bdc0bfaa2398a917e434b764b6134ad564bc2dfc829a030" exitCode=0 Oct 11 04:15:02 crc kubenswrapper[4953]: I1011 04:15:02.253369 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" event={"ID":"10b6f91c-6120-4fe5-97c8-75e16c2274fc","Type":"ContainerDied","Data":"dba79282c9f030495bdc0bfaa2398a917e434b764b6134ad564bc2dfc829a030"} Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.660781 4953 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.664076 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.677915 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.712643 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839312 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume\") pod \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839548 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzzcs\" (UniqueName: \"kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs\") pod \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839581 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume\") pod \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\" (UID: \"10b6f91c-6120-4fe5-97c8-75e16c2274fc\") " Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839876 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkf68\" (UniqueName: \"kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839931 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.839950 4953 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.840728 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "10b6f91c-6120-4fe5-97c8-75e16c2274fc" (UID: "10b6f91c-6120-4fe5-97c8-75e16c2274fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.846988 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs" (OuterVolumeSpecName: "kube-api-access-hzzcs") pod "10b6f91c-6120-4fe5-97c8-75e16c2274fc" (UID: "10b6f91c-6120-4fe5-97c8-75e16c2274fc"). InnerVolumeSpecName "kube-api-access-hzzcs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.855155 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "10b6f91c-6120-4fe5-97c8-75e16c2274fc" (UID: "10b6f91c-6120-4fe5-97c8-75e16c2274fc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.941720 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.941764 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.941937 4953 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkf68\" (UniqueName: \"kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.942009 4953 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10b6f91c-6120-4fe5-97c8-75e16c2274fc-config-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.942023 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzzcs\" (UniqueName: \"kubernetes.io/projected/10b6f91c-6120-4fe5-97c8-75e16c2274fc-kube-api-access-hzzcs\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.942033 4953 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10b6f91c-6120-4fe5-97c8-75e16c2274fc-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.942778 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.942995 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities\") pod \"certified-operators-x98nq\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:03 crc kubenswrapper[4953]: I1011 04:15:03.959359 4953 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkf68\" (UniqueName: \"kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68\") pod \"certified-operators-x98nq\" 
(UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.028055 4953 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.294435 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" event={"ID":"10b6f91c-6120-4fe5-97c8-75e16c2274fc","Type":"ContainerDied","Data":"0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492"} Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.294476 4953 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0718ce0ba73a0c576906ddbec5c1381d0cccb75ef61eb18af37d72ce412f2492" Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.294533 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29335935-bhzwg" Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.647581 4953 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.793297 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm"] Oct 11 04:15:04 crc kubenswrapper[4953]: I1011 04:15:04.803409 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29335890-zgsbm"] Oct 11 04:15:05 crc kubenswrapper[4953]: I1011 04:15:05.304144 4953 generic.go:334] "Generic (PLEG): container finished" podID="1d4084e7-c7e2-4a65-878d-0bb6d31094a7" containerID="b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6" exitCode=0 Oct 11 04:15:05 crc kubenswrapper[4953]: I1011 04:15:05.304208 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerDied","Data":"b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6"} Oct 11 04:15:05 crc kubenswrapper[4953]: I1011 04:15:05.304241 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerStarted","Data":"a41b68eec4f2dff01d5f737342b2571754df51e61c3a04596998e077f455829c"} Oct 11 04:15:05 crc kubenswrapper[4953]: I1011 04:15:05.306004 4953 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 11 04:15:05 crc kubenswrapper[4953]: I1011 04:15:05.807583 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fd8c8bf-6db8-4ad9-b5d3-2651b048232d" path="/var/lib/kubelet/pods/2fd8c8bf-6db8-4ad9-b5d3-2651b048232d/volumes" Oct 11 04:15:06 crc kubenswrapper[4953]: I1011 04:15:06.321097 4953 generic.go:334] "Generic (PLEG): container finished" podID="1d4084e7-c7e2-4a65-878d-0bb6d31094a7" containerID="341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f" exitCode=0 Oct 11 04:15:06 crc kubenswrapper[4953]: I1011 04:15:06.321292 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerDied","Data":"341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f"} Oct 11 04:15:07 crc 
kubenswrapper[4953]: I1011 04:15:07.333779 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerStarted","Data":"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d"} Oct 11 04:15:07 crc kubenswrapper[4953]: I1011 04:15:07.359946 4953 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x98nq" podStartSLOduration=2.878494291 podStartE2EDuration="4.359928811s" podCreationTimestamp="2025-10-11 04:15:03 +0000 UTC" firstStartedPulling="2025-10-11 04:15:05.30572301 +0000 UTC m=+5316.238810664" lastFinishedPulling="2025-10-11 04:15:06.7871575 +0000 UTC m=+5317.720245184" observedRunningTime="2025-10-11 04:15:07.358175917 +0000 UTC m=+5318.291263571" watchObservedRunningTime="2025-10-11 04:15:07.359928811 +0000 UTC m=+5318.293016455" Oct 11 04:15:14 crc kubenswrapper[4953]: I1011 04:15:14.028355 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:14 crc kubenswrapper[4953]: I1011 04:15:14.029276 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:14 crc kubenswrapper[4953]: I1011 04:15:14.331220 4953 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:14 crc kubenswrapper[4953]: I1011 04:15:14.476221 4953 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:14 crc kubenswrapper[4953]: I1011 04:15:14.567482 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:16 crc kubenswrapper[4953]: I1011 04:15:16.410875 4953 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x98nq" podUID="1d4084e7-c7e2-4a65-878d-0bb6d31094a7" containerName="registry-server" containerID="cri-o://82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d" gracePeriod=2 Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.394935 4953 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.457134 4953 generic.go:334] "Generic (PLEG): container finished" podID="1d4084e7-c7e2-4a65-878d-0bb6d31094a7" containerID="82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d" exitCode=0 Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.457460 4953 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x98nq" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.457459 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerDied","Data":"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d"} Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.458704 4953 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x98nq" event={"ID":"1d4084e7-c7e2-4a65-878d-0bb6d31094a7","Type":"ContainerDied","Data":"a41b68eec4f2dff01d5f737342b2571754df51e61c3a04596998e077f455829c"} Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.458736 4953 scope.go:117] "RemoveContainer" containerID="82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.496632 4953 scope.go:117] "RemoveContainer" containerID="341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.520076 4953 scope.go:117] "RemoveContainer" containerID="b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.520445 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities\") pod \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.520746 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content\") pod \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.520795 4953 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkf68\" (UniqueName: \"kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68\") pod \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\" (UID: \"1d4084e7-c7e2-4a65-878d-0bb6d31094a7\") " Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.521745 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities" (OuterVolumeSpecName: "utilities") pod "1d4084e7-c7e2-4a65-878d-0bb6d31094a7" (UID: "1d4084e7-c7e2-4a65-878d-0bb6d31094a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.527861 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68" (OuterVolumeSpecName: "kube-api-access-bkf68") pod "1d4084e7-c7e2-4a65-878d-0bb6d31094a7" (UID: "1d4084e7-c7e2-4a65-878d-0bb6d31094a7"). InnerVolumeSpecName "kube-api-access-bkf68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.574165 4953 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d4084e7-c7e2-4a65-878d-0bb6d31094a7" (UID: "1d4084e7-c7e2-4a65-878d-0bb6d31094a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.607904 4953 scope.go:117] "RemoveContainer" containerID="82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d" Oct 11 04:15:17 crc kubenswrapper[4953]: E1011 04:15:17.608524 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d\": container with ID starting with 82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d not found: ID does not exist" containerID="82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.608588 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d"} err="failed to get container status \"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d\": rpc error: code = NotFound desc = could not find container \"82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d\": container with ID starting with 82f3fbc0632103772b02a4156b3a8ae98631aa39b6872f69a6bae7b1d98aad0d not found: ID does not exist" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.608644 4953 scope.go:117] "RemoveContainer" containerID="341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f" Oct 11 04:15:17 crc kubenswrapper[4953]: E1011 04:15:17.608960 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f\": container with ID starting with 341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f not found: ID does not exist" containerID="341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.608984 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f"} err="failed to get container status \"341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f\": rpc error: code = NotFound desc = could not find container \"341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f\": container with ID starting with 341b5a621d57d5b5ae729a1bef1ccf8f7730a690ba3b9ade8ed647103995858f not found: ID does not exist" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.608999 4953 scope.go:117] "RemoveContainer" containerID="b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6" Oct 11 04:15:17 crc kubenswrapper[4953]: E1011 04:15:17.609257 4953 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6\": container with ID starting with b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6 not found: ID does not exist" 
containerID="b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.609280 4953 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6"} err="failed to get container status \"b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6\": rpc error: code = NotFound desc = could not find container \"b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6\": container with ID starting with b78f632f77dfd285422aa089b21c59964c8e8c953840f855d44459fbd1b8ffc6 not found: ID does not exist" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.624146 4953 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.624196 4953 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkf68\" (UniqueName: \"kubernetes.io/projected/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-kube-api-access-bkf68\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.624216 4953 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4084e7-c7e2-4a65-878d-0bb6d31094a7-utilities\") on node \"crc\" DevicePath \"\"" Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.808326 4953 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:17 crc kubenswrapper[4953]: I1011 04:15:17.817453 4953 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x98nq"] Oct 11 04:15:19 crc kubenswrapper[4953]: I1011 04:15:19.808249 4953 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d4084e7-c7e2-4a65-878d-0bb6d31094a7" path="/var/lib/kubelet/pods/1d4084e7-c7e2-4a65-878d-0bb6d31094a7/volumes" Oct 11 04:15:53 crc kubenswrapper[4953]: I1011 04:15:53.914318 4953 scope.go:117] "RemoveContainer" containerID="c156590282ed987e0f2f27873088ecc14eea0edb96c3c7dd7e39852e8ac5e791" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515072355025024451 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015072355026017367 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015072342061016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015072342061015455 5ustar corecore